# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.9.2, generator: @autorest/python)
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.core.exceptions import HttpResponseError
import msrest.serialization
class ACIAdvanceSettings(msrest.serialization.Model):
"""ACIAdvanceSettings.
:ivar container_resource_requirements:
:vartype container_resource_requirements: ~flow.models.ContainerResourceRequirements
:ivar app_insights_enabled:
:vartype app_insights_enabled: bool
:ivar ssl_enabled:
:vartype ssl_enabled: bool
:ivar ssl_certificate:
:vartype ssl_certificate: str
:ivar ssl_key:
:vartype ssl_key: str
:ivar c_name:
:vartype c_name: str
:ivar dns_name_label:
:vartype dns_name_label: str
"""
_attribute_map = {
'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
'ssl_enabled': {'key': 'sslEnabled', 'type': 'bool'},
'ssl_certificate': {'key': 'sslCertificate', 'type': 'str'},
'ssl_key': {'key': 'sslKey', 'type': 'str'},
'c_name': {'key': 'cName', 'type': 'str'},
'dns_name_label': {'key': 'dnsNameLabel', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword container_resource_requirements:
:paramtype container_resource_requirements: ~flow.models.ContainerResourceRequirements
:keyword app_insights_enabled:
:paramtype app_insights_enabled: bool
:keyword ssl_enabled:
:paramtype ssl_enabled: bool
:keyword ssl_certificate:
:paramtype ssl_certificate: str
:keyword ssl_key:
:paramtype ssl_key: str
:keyword c_name:
:paramtype c_name: str
:keyword dns_name_label:
:paramtype dns_name_label: str
"""
super(ACIAdvanceSettings, self).__init__(**kwargs)
self.container_resource_requirements = kwargs.get('container_resource_requirements', None)
self.app_insights_enabled = kwargs.get('app_insights_enabled', None)
self.ssl_enabled = kwargs.get('ssl_enabled', None)
self.ssl_certificate = kwargs.get('ssl_certificate', None)
self.ssl_key = kwargs.get('ssl_key', None)
self.c_name = kwargs.get('c_name', None)
self.dns_name_label = kwargs.get('dns_name_label', None)
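# Usage sketch (editor's addition, not generated code): constructing an
# ACIAdvanceSettings and round-tripping it through msrest serialization.
# The field values below are hypothetical placeholders; serialize() emits
# the camelCase wire keys declared in _attribute_map.
def _example_aci_advance_settings():
    settings = ACIAdvanceSettings(
        app_insights_enabled=True,
        ssl_enabled=True,
        c_name='scoring.contoso.com',
        dns_name_label='contoso-aci',
    )
    body = settings.serialize()  # e.g. {'appInsightsEnabled': True, 'sslEnabled': True, ...}
    return ACIAdvanceSettings.deserialize(body)  # back to a model instance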
class Activate(msrest.serialization.Model):
"""Activate.
:ivar when:
:vartype when: str
:ivar is_property: Anything.
:vartype is_property: any
"""
_attribute_map = {
'when': {'key': 'when', 'type': 'str'},
'is_property': {'key': 'is', 'type': 'object'},
}
def __init__(
self,
**kwargs
):
"""
:keyword when:
:paramtype when: str
:keyword is_property: Anything.
:paramtype is_property: any
"""
super(Activate, self).__init__(**kwargs)
self.when = kwargs.get('when', None)
self.is_property = kwargs.get('is_property', None)
class AdditionalErrorInfo(msrest.serialization.Model):
"""AdditionalErrorInfo.
:ivar type:
:vartype type: str
:ivar info: Anything.
:vartype info: any
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'info': {'key': 'info', 'type': 'object'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type:
:paramtype type: str
:keyword info: Anything.
:paramtype info: any
"""
super(AdditionalErrorInfo, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.info = kwargs.get('info', None)
class AdhocTriggerScheduledCommandJobRequest(msrest.serialization.Model):
"""AdhocTriggerScheduledCommandJobRequest.
:ivar job_name:
:vartype job_name: str
:ivar job_display_name:
:vartype job_display_name: str
:ivar trigger_time_string:
:vartype trigger_time_string: str
"""
_attribute_map = {
'job_name': {'key': 'jobName', 'type': 'str'},
'job_display_name': {'key': 'jobDisplayName', 'type': 'str'},
'trigger_time_string': {'key': 'triggerTimeString', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword job_name:
:paramtype job_name: str
:keyword job_display_name:
:paramtype job_display_name: str
:keyword trigger_time_string:
:paramtype trigger_time_string: str
"""
super(AdhocTriggerScheduledCommandJobRequest, self).__init__(**kwargs)
self.job_name = kwargs.get('job_name', None)
self.job_display_name = kwargs.get('job_display_name', None)
self.trigger_time_string = kwargs.get('trigger_time_string', None)
class AdhocTriggerScheduledSparkJobRequest(msrest.serialization.Model):
"""AdhocTriggerScheduledSparkJobRequest.
:ivar job_name:
:vartype job_name: str
:ivar job_display_name:
:vartype job_display_name: str
:ivar trigger_time_string:
:vartype trigger_time_string: str
"""
_attribute_map = {
'job_name': {'key': 'jobName', 'type': 'str'},
'job_display_name': {'key': 'jobDisplayName', 'type': 'str'},
'trigger_time_string': {'key': 'triggerTimeString', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword job_name:
:paramtype job_name: str
:keyword job_display_name:
:paramtype job_display_name: str
:keyword trigger_time_string:
:paramtype trigger_time_string: str
"""
super(AdhocTriggerScheduledSparkJobRequest, self).__init__(**kwargs)
self.job_name = kwargs.get('job_name', None)
self.job_display_name = kwargs.get('job_display_name', None)
self.trigger_time_string = kwargs.get('trigger_time_string', None)
class AetherAmlDataset(msrest.serialization.Model):
"""AetherAmlDataset.
:ivar registered_data_set_reference:
:vartype registered_data_set_reference: ~flow.models.AetherRegisteredDataSetReference
:ivar saved_data_set_reference:
:vartype saved_data_set_reference: ~flow.models.AetherSavedDataSetReference
:ivar additional_transformations:
:vartype additional_transformations: str
"""
_attribute_map = {
'registered_data_set_reference': {'key': 'registeredDataSetReference', 'type': 'AetherRegisteredDataSetReference'},
'saved_data_set_reference': {'key': 'savedDataSetReference', 'type': 'AetherSavedDataSetReference'},
'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword registered_data_set_reference:
:paramtype registered_data_set_reference: ~flow.models.AetherRegisteredDataSetReference
:keyword saved_data_set_reference:
:paramtype saved_data_set_reference: ~flow.models.AetherSavedDataSetReference
:keyword additional_transformations:
:paramtype additional_transformations: str
"""
super(AetherAmlDataset, self).__init__(**kwargs)
self.registered_data_set_reference = kwargs.get('registered_data_set_reference', None)
self.saved_data_set_reference = kwargs.get('saved_data_set_reference', None)
self.additional_transformations = kwargs.get('additional_transformations', None)
class AetherAmlSparkCloudSetting(msrest.serialization.Model):
"""AetherAmlSparkCloudSetting.
:ivar entry:
:vartype entry: ~flow.models.AetherEntrySetting
:ivar files:
:vartype files: list[str]
:ivar archives:
:vartype archives: list[str]
:ivar jars:
:vartype jars: list[str]
:ivar py_files:
:vartype py_files: list[str]
:ivar driver_memory:
:vartype driver_memory: str
:ivar driver_cores:
:vartype driver_cores: int
:ivar executor_memory:
:vartype executor_memory: str
:ivar executor_cores:
:vartype executor_cores: int
:ivar number_executors:
:vartype number_executors: int
:ivar environment_asset_id:
:vartype environment_asset_id: str
:ivar environment_variables: Dictionary of :code:`<string>`.
:vartype environment_variables: dict[str, str]
:ivar inline_environment_definition_string:
:vartype inline_environment_definition_string: str
:ivar conf: Dictionary of :code:`<string>`.
:vartype conf: dict[str, str]
:ivar compute:
:vartype compute: str
:ivar resources:
:vartype resources: ~flow.models.AetherResourcesSetting
:ivar identity:
:vartype identity: ~flow.models.AetherIdentitySetting
"""
_attribute_map = {
'entry': {'key': 'entry', 'type': 'AetherEntrySetting'},
'files': {'key': 'files', 'type': '[str]'},
'archives': {'key': 'archives', 'type': '[str]'},
'jars': {'key': 'jars', 'type': '[str]'},
'py_files': {'key': 'pyFiles', 'type': '[str]'},
'driver_memory': {'key': 'driverMemory', 'type': 'str'},
'driver_cores': {'key': 'driverCores', 'type': 'int'},
'executor_memory': {'key': 'executorMemory', 'type': 'str'},
'executor_cores': {'key': 'executorCores', 'type': 'int'},
'number_executors': {'key': 'numberExecutors', 'type': 'int'},
'environment_asset_id': {'key': 'environmentAssetId', 'type': 'str'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'inline_environment_definition_string': {'key': 'inlineEnvironmentDefinitionString', 'type': 'str'},
'conf': {'key': 'conf', 'type': '{str}'},
'compute': {'key': 'compute', 'type': 'str'},
'resources': {'key': 'resources', 'type': 'AetherResourcesSetting'},
'identity': {'key': 'identity', 'type': 'AetherIdentitySetting'},
}
def __init__(
self,
**kwargs
):
"""
:keyword entry:
:paramtype entry: ~flow.models.AetherEntrySetting
:keyword files:
:paramtype files: list[str]
:keyword archives:
:paramtype archives: list[str]
:keyword jars:
:paramtype jars: list[str]
:keyword py_files:
:paramtype py_files: list[str]
:keyword driver_memory:
:paramtype driver_memory: str
:keyword driver_cores:
:paramtype driver_cores: int
:keyword executor_memory:
:paramtype executor_memory: str
:keyword executor_cores:
:paramtype executor_cores: int
:keyword number_executors:
:paramtype number_executors: int
:keyword environment_asset_id:
:paramtype environment_asset_id: str
:keyword environment_variables: Dictionary of :code:`<string>`.
:paramtype environment_variables: dict[str, str]
:keyword inline_environment_definition_string:
:paramtype inline_environment_definition_string: str
:keyword conf: Dictionary of :code:`<string>`.
:paramtype conf: dict[str, str]
:keyword compute:
:paramtype compute: str
:keyword resources:
:paramtype resources: ~flow.models.AetherResourcesSetting
:keyword identity:
:paramtype identity: ~flow.models.AetherIdentitySetting
"""
super(AetherAmlSparkCloudSetting, self).__init__(**kwargs)
self.entry = kwargs.get('entry', None)
self.files = kwargs.get('files', None)
self.archives = kwargs.get('archives', None)
self.jars = kwargs.get('jars', None)
self.py_files = kwargs.get('py_files', None)
self.driver_memory = kwargs.get('driver_memory', None)
self.driver_cores = kwargs.get('driver_cores', None)
self.executor_memory = kwargs.get('executor_memory', None)
self.executor_cores = kwargs.get('executor_cores', None)
self.number_executors = kwargs.get('number_executors', None)
self.environment_asset_id = kwargs.get('environment_asset_id', None)
self.environment_variables = kwargs.get('environment_variables', None)
self.inline_environment_definition_string = kwargs.get('inline_environment_definition_string', None)
self.conf = kwargs.get('conf', None)
self.compute = kwargs.get('compute', None)
self.resources = kwargs.get('resources', None)
self.identity = kwargs.get('identity', None)
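# Usage sketch (editor's addition): a minimal AetherAmlSparkCloudSetting.
# The compute name, file list, and resource figures are hypothetical; any
# field left unset simply serializes as absent.
def _example_aml_spark_cloud_setting():
    return AetherAmlSparkCloudSetting(
        py_files=['helpers.zip'],
        driver_memory='4g',
        driver_cores=2,
        executor_memory='8g',
        executor_cores=4,
        number_executors=8,
        conf={'spark.yarn.maxAppAttempts': '1'},
        compute='my-spark-compute',
    )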
class AetherAPCloudConfiguration(msrest.serialization.Model):
"""AetherAPCloudConfiguration.
:ivar referenced_ap_module_guid:
:vartype referenced_ap_module_guid: str
:ivar user_alias:
:vartype user_alias: str
:ivar aether_module_type:
:vartype aether_module_type: str
:ivar allow_overwrite:
:vartype allow_overwrite: bool
:ivar destination_expiration_days:
:vartype destination_expiration_days: int
:ivar should_respect_line_boundaries:
:vartype should_respect_line_boundaries: bool
"""
_attribute_map = {
'referenced_ap_module_guid': {'key': 'referencedAPModuleGuid', 'type': 'str'},
'user_alias': {'key': 'userAlias', 'type': 'str'},
'aether_module_type': {'key': 'aetherModuleType', 'type': 'str'},
'allow_overwrite': {'key': 'allowOverwrite', 'type': 'bool'},
'destination_expiration_days': {'key': 'destinationExpirationDays', 'type': 'int'},
'should_respect_line_boundaries': {'key': 'shouldRespectLineBoundaries', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword referenced_ap_module_guid:
:paramtype referenced_ap_module_guid: str
:keyword user_alias:
:paramtype user_alias: str
:keyword aether_module_type:
:paramtype aether_module_type: str
:keyword allow_overwrite:
:paramtype allow_overwrite: bool
:keyword destination_expiration_days:
:paramtype destination_expiration_days: int
:keyword should_respect_line_boundaries:
:paramtype should_respect_line_boundaries: bool
"""
super(AetherAPCloudConfiguration, self).__init__(**kwargs)
self.referenced_ap_module_guid = kwargs.get('referenced_ap_module_guid', None)
self.user_alias = kwargs.get('user_alias', None)
self.aether_module_type = kwargs.get('aether_module_type', None)
self.allow_overwrite = kwargs.get('allow_overwrite', None)
self.destination_expiration_days = kwargs.get('destination_expiration_days', None)
self.should_respect_line_boundaries = kwargs.get('should_respect_line_boundaries', None)
class AetherArgumentAssignment(msrest.serialization.Model):
"""AetherArgumentAssignment.
:ivar value_type: Possible values include: "Literal", "Parameter", "Input", "Output",
"NestedList", "StringInterpolationList".
:vartype value_type: str or ~flow.models.AetherArgumentValueType
:ivar value:
:vartype value: str
:ivar nested_argument_list:
:vartype nested_argument_list: list[~flow.models.AetherArgumentAssignment]
:ivar string_interpolation_argument_list:
:vartype string_interpolation_argument_list: list[~flow.models.AetherArgumentAssignment]
"""
_attribute_map = {
'value_type': {'key': 'valueType', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
'nested_argument_list': {'key': 'nestedArgumentList', 'type': '[AetherArgumentAssignment]'},
'string_interpolation_argument_list': {'key': 'stringInterpolationArgumentList', 'type': '[AetherArgumentAssignment]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword value_type: Possible values include: "Literal", "Parameter", "Input", "Output",
"NestedList", "StringInterpolationList".
:paramtype value_type: str or ~flow.models.AetherArgumentValueType
:keyword value:
:paramtype value: str
:keyword nested_argument_list:
:paramtype nested_argument_list: list[~flow.models.AetherArgumentAssignment]
:keyword string_interpolation_argument_list:
:paramtype string_interpolation_argument_list: list[~flow.models.AetherArgumentAssignment]
"""
super(AetherArgumentAssignment, self).__init__(**kwargs)
self.value_type = kwargs.get('value_type', None)
self.value = kwargs.get('value', None)
self.nested_argument_list = kwargs.get('nested_argument_list', None)
self.string_interpolation_argument_list = kwargs.get('string_interpolation_argument_list', None)
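# Usage sketch (editor's addition): argument assignments compose recursively.
# A "NestedList" value carries child assignments instead of a literal string;
# the value_type strings below are the values documented in the docstring.
def _example_argument_assignment():
    literal = AetherArgumentAssignment(value_type='Literal', value='--verbose')
    param = AetherArgumentAssignment(value_type='Parameter', value='learning_rate')
    return AetherArgumentAssignment(
        value_type='NestedList',
        nested_argument_list=[literal, param],
    )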
class AetherAssetDefinition(msrest.serialization.Model):
"""AetherAssetDefinition.
:ivar path:
:vartype path: str
:ivar type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
"MLFlowModel", "TritonModel", "OpenAIModel".
:vartype type: str or ~flow.models.AetherAssetType
:ivar asset_id:
:vartype asset_id: str
:ivar initial_asset_id:
:vartype initial_asset_id: str
:ivar serialized_asset_id:
:vartype serialized_asset_id: str
"""
_attribute_map = {
'path': {'key': 'path', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'asset_id': {'key': 'assetId', 'type': 'str'},
'initial_asset_id': {'key': 'initialAssetId', 'type': 'str'},
'serialized_asset_id': {'key': 'serializedAssetId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword path:
:paramtype path: str
:keyword type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
"MLFlowModel", "TritonModel", "OpenAIModel".
:paramtype type: str or ~flow.models.AetherAssetType
:keyword asset_id:
:paramtype asset_id: str
:keyword initial_asset_id:
:paramtype initial_asset_id: str
:keyword serialized_asset_id:
:paramtype serialized_asset_id: str
"""
super(AetherAssetDefinition, self).__init__(**kwargs)
self.path = kwargs.get('path', None)
self.type = kwargs.get('type', None)
self.asset_id = kwargs.get('asset_id', None)
self.initial_asset_id = kwargs.get('initial_asset_id', None)
self.serialized_asset_id = kwargs.get('serialized_asset_id', None)
class AetherAssetOutputSettings(msrest.serialization.Model):
"""AetherAssetOutputSettings.
:ivar path:
:vartype path: str
:ivar path_parameter_assignment:
:vartype path_parameter_assignment: ~flow.models.AetherParameterAssignment
:ivar type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
"MLFlowModel", "TritonModel", "OpenAIModel".
:vartype type: str or ~flow.models.AetherAssetType
:ivar options: This is a dictionary.
:vartype options: dict[str, str]
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AetherDataStoreMode
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
"""
_attribute_map = {
'path': {'key': 'path', 'type': 'str'},
'path_parameter_assignment': {'key': 'PathParameterAssignment', 'type': 'AetherParameterAssignment'},
'type': {'key': 'type', 'type': 'str'},
'options': {'key': 'options', 'type': '{str}'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword path:
:paramtype path: str
:keyword path_parameter_assignment:
:paramtype path_parameter_assignment: ~flow.models.AetherParameterAssignment
:keyword type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
"MLFlowModel", "TritonModel", "OpenAIModel".
:paramtype type: str or ~flow.models.AetherAssetType
:keyword options: This is a dictionary.
:paramtype options: dict[str, str]
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AetherDataStoreMode
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
"""
super(AetherAssetOutputSettings, self).__init__(**kwargs)
self.path = kwargs.get('path', None)
self.path_parameter_assignment = kwargs.get('path_parameter_assignment', None)
self.type = kwargs.get('type', None)
self.options = kwargs.get('options', None)
self.data_store_mode = kwargs.get('data_store_mode', None)
self.name = kwargs.get('name', None)
self.version = kwargs.get('version', None)
class AetherAutoFeaturizeConfiguration(msrest.serialization.Model):
"""AetherAutoFeaturizeConfiguration.
:ivar featurization_config:
:vartype featurization_config: ~flow.models.AetherFeaturizationSettings
"""
_attribute_map = {
'featurization_config': {'key': 'featurizationConfig', 'type': 'AetherFeaturizationSettings'},
}
def __init__(
self,
**kwargs
):
"""
:keyword featurization_config:
:paramtype featurization_config: ~flow.models.AetherFeaturizationSettings
"""
super(AetherAutoFeaturizeConfiguration, self).__init__(**kwargs)
self.featurization_config = kwargs.get('featurization_config', None)
class AetherAutoMLComponentConfiguration(msrest.serialization.Model):
"""AetherAutoMLComponentConfiguration.
:ivar auto_train_config:
:vartype auto_train_config: ~flow.models.AetherAutoTrainConfiguration
:ivar auto_featurize_config:
:vartype auto_featurize_config: ~flow.models.AetherAutoFeaturizeConfiguration
"""
_attribute_map = {
'auto_train_config': {'key': 'autoTrainConfig', 'type': 'AetherAutoTrainConfiguration'},
'auto_featurize_config': {'key': 'autoFeaturizeConfig', 'type': 'AetherAutoFeaturizeConfiguration'},
}
def __init__(
self,
**kwargs
):
"""
:keyword auto_train_config:
:paramtype auto_train_config: ~flow.models.AetherAutoTrainConfiguration
:keyword auto_featurize_config:
:paramtype auto_featurize_config: ~flow.models.AetherAutoFeaturizeConfiguration
"""
super(AetherAutoMLComponentConfiguration, self).__init__(**kwargs)
self.auto_train_config = kwargs.get('auto_train_config', None)
self.auto_featurize_config = kwargs.get('auto_featurize_config', None)
class AetherAutoTrainConfiguration(msrest.serialization.Model):
"""AetherAutoTrainConfiguration.
:ivar general_settings:
:vartype general_settings: ~flow.models.AetherGeneralSettings
:ivar limit_settings:
:vartype limit_settings: ~flow.models.AetherLimitSettings
:ivar data_settings:
:vartype data_settings: ~flow.models.AetherDataSettings
:ivar forecasting_settings:
:vartype forecasting_settings: ~flow.models.AetherForecastingSettings
:ivar training_settings:
:vartype training_settings: ~flow.models.AetherTrainingSettings
:ivar sweep_settings:
:vartype sweep_settings: ~flow.models.AetherSweepSettings
:ivar image_model_settings: Dictionary of :code:`<any>`.
:vartype image_model_settings: dict[str, any]
:ivar properties: Dictionary of :code:`<string>`.
:vartype properties: dict[str, str]
:ivar compute_configuration:
:vartype compute_configuration: ~flow.models.AetherComputeConfiguration
:ivar resource_configurtion:
:vartype resource_configurtion: ~flow.models.AetherResourceConfiguration
:ivar environment_id:
:vartype environment_id: str
:ivar environment_variables: Dictionary of :code:`<string>`.
:vartype environment_variables: dict[str, str]
"""
_attribute_map = {
'general_settings': {'key': 'generalSettings', 'type': 'AetherGeneralSettings'},
'limit_settings': {'key': 'limitSettings', 'type': 'AetherLimitSettings'},
'data_settings': {'key': 'dataSettings', 'type': 'AetherDataSettings'},
'forecasting_settings': {'key': 'forecastingSettings', 'type': 'AetherForecastingSettings'},
'training_settings': {'key': 'trainingSettings', 'type': 'AetherTrainingSettings'},
'sweep_settings': {'key': 'sweepSettings', 'type': 'AetherSweepSettings'},
'image_model_settings': {'key': 'imageModelSettings', 'type': '{object}'},
'properties': {'key': 'properties', 'type': '{str}'},
'compute_configuration': {'key': 'computeConfiguration', 'type': 'AetherComputeConfiguration'},
        'resource_configurtion': {'key': 'resourceConfigurtion', 'type': 'AetherResourceConfiguration'},  # (sic) misspelling preserved to match the service wire contract
'environment_id': {'key': 'environmentId', 'type': 'str'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword general_settings:
:paramtype general_settings: ~flow.models.AetherGeneralSettings
:keyword limit_settings:
:paramtype limit_settings: ~flow.models.AetherLimitSettings
:keyword data_settings:
:paramtype data_settings: ~flow.models.AetherDataSettings
:keyword forecasting_settings:
:paramtype forecasting_settings: ~flow.models.AetherForecastingSettings
:keyword training_settings:
:paramtype training_settings: ~flow.models.AetherTrainingSettings
:keyword sweep_settings:
:paramtype sweep_settings: ~flow.models.AetherSweepSettings
:keyword image_model_settings: Dictionary of :code:`<any>`.
:paramtype image_model_settings: dict[str, any]
:keyword properties: Dictionary of :code:`<string>`.
:paramtype properties: dict[str, str]
:keyword compute_configuration:
:paramtype compute_configuration: ~flow.models.AetherComputeConfiguration
:keyword resource_configurtion:
:paramtype resource_configurtion: ~flow.models.AetherResourceConfiguration
:keyword environment_id:
:paramtype environment_id: str
:keyword environment_variables: Dictionary of :code:`<string>`.
:paramtype environment_variables: dict[str, str]
"""
super(AetherAutoTrainConfiguration, self).__init__(**kwargs)
self.general_settings = kwargs.get('general_settings', None)
self.limit_settings = kwargs.get('limit_settings', None)
self.data_settings = kwargs.get('data_settings', None)
self.forecasting_settings = kwargs.get('forecasting_settings', None)
self.training_settings = kwargs.get('training_settings', None)
self.sweep_settings = kwargs.get('sweep_settings', None)
self.image_model_settings = kwargs.get('image_model_settings', None)
self.properties = kwargs.get('properties', None)
self.compute_configuration = kwargs.get('compute_configuration', None)
self.resource_configurtion = kwargs.get('resource_configurtion', None)
self.environment_id = kwargs.get('environment_id', None)
self.environment_variables = kwargs.get('environment_variables', None)
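# Usage sketch (editor's addition): AetherAutoMLComponentConfiguration (defined
# above) wraps the train and featurize configs. Sub-setting models such as
# AetherGeneralSettings are declared elsewhere in this module; the property
# and environment values here are hypothetical.
def _example_automl_component_configuration():
    train = AetherAutoTrainConfiguration(
        properties={'primary_metric': 'accuracy'},
        environment_id='azureml:AzureML-AutoML:1',
    )
    featurize = AetherAutoFeaturizeConfiguration()
    return AetherAutoMLComponentConfiguration(
        auto_train_config=train,
        auto_featurize_config=featurize,
    )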
class AetherAzureBlobReference(msrest.serialization.Model):
"""AetherAzureBlobReference.
:ivar container:
:vartype container: str
:ivar sas_token:
:vartype sas_token: str
:ivar uri:
:vartype uri: str
:ivar account:
:vartype account: str
:ivar relative_path:
:vartype relative_path: str
:ivar path_type: Possible values include: "Unknown", "File", "Folder".
:vartype path_type: str or ~flow.models.AetherFileBasedPathType
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'container': {'key': 'container', 'type': 'str'},
'sas_token': {'key': 'sasToken', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'account': {'key': 'account', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'path_type': {'key': 'pathType', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword container:
:paramtype container: str
:keyword sas_token:
:paramtype sas_token: str
:keyword uri:
:paramtype uri: str
:keyword account:
:paramtype account: str
:keyword relative_path:
:paramtype relative_path: str
:keyword path_type: Possible values include: "Unknown", "File", "Folder".
:paramtype path_type: str or ~flow.models.AetherFileBasedPathType
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AetherAzureBlobReference, self).__init__(**kwargs)
self.container = kwargs.get('container', None)
self.sas_token = kwargs.get('sas_token', None)
self.uri = kwargs.get('uri', None)
self.account = kwargs.get('account', None)
self.relative_path = kwargs.get('relative_path', None)
self.path_type = kwargs.get('path_type', None)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
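# Usage sketch (editor's addition): pointing an AetherAzureBlobReference at a
# blob path. Container, account, and path are placeholders; path_type uses one
# of the documented values ("Unknown", "File", "Folder").
def _example_azure_blob_reference():
    return AetherAzureBlobReference(
        container='training-data',
        account='contosostorage',
        relative_path='datasets/iris.csv',
        path_type='File',
        aml_data_store_name='workspaceblobstore',
    )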
class AetherAzureDatabaseReference(msrest.serialization.Model):
"""AetherAzureDatabaseReference.
:ivar server_uri:
:vartype server_uri: str
:ivar database_name:
:vartype database_name: str
:ivar table_name:
:vartype table_name: str
:ivar sql_query:
:vartype sql_query: str
:ivar stored_procedure_name:
:vartype stored_procedure_name: str
:ivar stored_procedure_parameters:
:vartype stored_procedure_parameters: list[~flow.models.AetherStoredProcedureParameter]
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'server_uri': {'key': 'serverUri', 'type': 'str'},
'database_name': {'key': 'databaseName', 'type': 'str'},
'table_name': {'key': 'tableName', 'type': 'str'},
'sql_query': {'key': 'sqlQuery', 'type': 'str'},
'stored_procedure_name': {'key': 'storedProcedureName', 'type': 'str'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '[AetherStoredProcedureParameter]'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword server_uri:
:paramtype server_uri: str
:keyword database_name:
:paramtype database_name: str
:keyword table_name:
:paramtype table_name: str
:keyword sql_query:
:paramtype sql_query: str
:keyword stored_procedure_name:
:paramtype stored_procedure_name: str
:keyword stored_procedure_parameters:
:paramtype stored_procedure_parameters: list[~flow.models.AetherStoredProcedureParameter]
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AetherAzureDatabaseReference, self).__init__(**kwargs)
self.server_uri = kwargs.get('server_uri', None)
self.database_name = kwargs.get('database_name', None)
self.table_name = kwargs.get('table_name', None)
self.sql_query = kwargs.get('sql_query', None)
self.stored_procedure_name = kwargs.get('stored_procedure_name', None)
self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
class AetherAzureDataLakeGen2Reference(msrest.serialization.Model):
"""AetherAzureDataLakeGen2Reference.
:ivar file_system_name:
:vartype file_system_name: str
:ivar uri:
:vartype uri: str
:ivar account:
:vartype account: str
:ivar relative_path:
:vartype relative_path: str
:ivar path_type: Possible values include: "Unknown", "File", "Folder".
:vartype path_type: str or ~flow.models.AetherFileBasedPathType
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'file_system_name': {'key': 'fileSystemName', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'account': {'key': 'account', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'path_type': {'key': 'pathType', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword file_system_name:
:paramtype file_system_name: str
:keyword uri:
:paramtype uri: str
:keyword account:
:paramtype account: str
:keyword relative_path:
:paramtype relative_path: str
:keyword path_type: Possible values include: "Unknown", "File", "Folder".
:paramtype path_type: str or ~flow.models.AetherFileBasedPathType
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AetherAzureDataLakeGen2Reference, self).__init__(**kwargs)
self.file_system_name = kwargs.get('file_system_name', None)
self.uri = kwargs.get('uri', None)
self.account = kwargs.get('account', None)
self.relative_path = kwargs.get('relative_path', None)
self.path_type = kwargs.get('path_type', None)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
class AetherAzureDataLakeReference(msrest.serialization.Model):
"""AetherAzureDataLakeReference.
:ivar tenant:
:vartype tenant: str
:ivar subscription:
:vartype subscription: str
:ivar resource_group:
:vartype resource_group: str
:ivar data_lake_uri:
:vartype data_lake_uri: str
:ivar uri:
:vartype uri: str
:ivar account:
:vartype account: str
:ivar relative_path:
:vartype relative_path: str
:ivar path_type: Possible values include: "Unknown", "File", "Folder".
:vartype path_type: str or ~flow.models.AetherFileBasedPathType
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'tenant': {'key': 'tenant', 'type': 'str'},
'subscription': {'key': 'subscription', 'type': 'str'},
'resource_group': {'key': 'resourceGroup', 'type': 'str'},
'data_lake_uri': {'key': 'dataLakeUri', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'account': {'key': 'account', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'path_type': {'key': 'pathType', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword tenant:
:paramtype tenant: str
:keyword subscription:
:paramtype subscription: str
:keyword resource_group:
:paramtype resource_group: str
:keyword data_lake_uri:
:paramtype data_lake_uri: str
:keyword uri:
:paramtype uri: str
:keyword account:
:paramtype account: str
:keyword relative_path:
:paramtype relative_path: str
:keyword path_type: Possible values include: "Unknown", "File", "Folder".
:paramtype path_type: str or ~flow.models.AetherFileBasedPathType
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AetherAzureDataLakeReference, self).__init__(**kwargs)
self.tenant = kwargs.get('tenant', None)
self.subscription = kwargs.get('subscription', None)
self.resource_group = kwargs.get('resource_group', None)
self.data_lake_uri = kwargs.get('data_lake_uri', None)
self.uri = kwargs.get('uri', None)
self.account = kwargs.get('account', None)
self.relative_path = kwargs.get('relative_path', None)
self.path_type = kwargs.get('path_type', None)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
class AetherAzureFilesReference(msrest.serialization.Model):
"""AetherAzureFilesReference.
:ivar share:
:vartype share: str
:ivar uri:
:vartype uri: str
:ivar account:
:vartype account: str
:ivar relative_path:
:vartype relative_path: str
:ivar path_type: Possible values include: "Unknown", "File", "Folder".
:vartype path_type: str or ~flow.models.AetherFileBasedPathType
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'share': {'key': 'share', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'account': {'key': 'account', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'path_type': {'key': 'pathType', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword share:
:paramtype share: str
:keyword uri:
:paramtype uri: str
:keyword account:
:paramtype account: str
:keyword relative_path:
:paramtype relative_path: str
:keyword path_type: Possible values include: "Unknown", "File", "Folder".
:paramtype path_type: str or ~flow.models.AetherFileBasedPathType
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AetherAzureFilesReference, self).__init__(**kwargs)
self.share = kwargs.get('share', None)
self.uri = kwargs.get('uri', None)
self.account = kwargs.get('account', None)
self.relative_path = kwargs.get('relative_path', None)
self.path_type = kwargs.get('path_type', None)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
class AetherBatchAiComputeInfo(msrest.serialization.Model):
"""AetherBatchAiComputeInfo.
:ivar batch_ai_subscription_id:
:vartype batch_ai_subscription_id: str
:ivar batch_ai_resource_group:
:vartype batch_ai_resource_group: str
:ivar batch_ai_workspace_name:
:vartype batch_ai_workspace_name: str
:ivar cluster_name:
:vartype cluster_name: str
:ivar native_shared_directory:
:vartype native_shared_directory: str
"""
_attribute_map = {
'batch_ai_subscription_id': {'key': 'batchAiSubscriptionId', 'type': 'str'},
'batch_ai_resource_group': {'key': 'batchAiResourceGroup', 'type': 'str'},
'batch_ai_workspace_name': {'key': 'batchAiWorkspaceName', 'type': 'str'},
'cluster_name': {'key': 'clusterName', 'type': 'str'},
'native_shared_directory': {'key': 'nativeSharedDirectory', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword batch_ai_subscription_id:
:paramtype batch_ai_subscription_id: str
:keyword batch_ai_resource_group:
:paramtype batch_ai_resource_group: str
:keyword batch_ai_workspace_name:
:paramtype batch_ai_workspace_name: str
:keyword cluster_name:
:paramtype cluster_name: str
:keyword native_shared_directory:
:paramtype native_shared_directory: str
"""
super(AetherBatchAiComputeInfo, self).__init__(**kwargs)
self.batch_ai_subscription_id = kwargs.get('batch_ai_subscription_id', None)
self.batch_ai_resource_group = kwargs.get('batch_ai_resource_group', None)
self.batch_ai_workspace_name = kwargs.get('batch_ai_workspace_name', None)
self.cluster_name = kwargs.get('cluster_name', None)
self.native_shared_directory = kwargs.get('native_shared_directory', None)
class AetherBuildArtifactInfo(msrest.serialization.Model):
"""AetherBuildArtifactInfo.
:ivar type: Possible values include: "CloudBuild", "Vso", "VsoGit".
:vartype type: str or ~flow.models.AetherBuildSourceType
:ivar cloud_build_drop_path_info:
:vartype cloud_build_drop_path_info: ~flow.models.AetherCloudBuildDropPathInfo
:ivar vso_build_artifact_info:
:vartype vso_build_artifact_info: ~flow.models.AetherVsoBuildArtifactInfo
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'cloud_build_drop_path_info': {'key': 'cloudBuildDropPathInfo', 'type': 'AetherCloudBuildDropPathInfo'},
'vso_build_artifact_info': {'key': 'vsoBuildArtifactInfo', 'type': 'AetherVsoBuildArtifactInfo'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: Possible values include: "CloudBuild", "Vso", "VsoGit".
:paramtype type: str or ~flow.models.AetherBuildSourceType
:keyword cloud_build_drop_path_info:
:paramtype cloud_build_drop_path_info: ~flow.models.AetherCloudBuildDropPathInfo
:keyword vso_build_artifact_info:
:paramtype vso_build_artifact_info: ~flow.models.AetherVsoBuildArtifactInfo
"""
super(AetherBuildArtifactInfo, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.cloud_build_drop_path_info = kwargs.get('cloud_build_drop_path_info', None)
self.vso_build_artifact_info = kwargs.get('vso_build_artifact_info', None)
class AetherCloudBuildDropPathInfo(msrest.serialization.Model):
"""AetherCloudBuildDropPathInfo.
:ivar build_info:
:vartype build_info: ~flow.models.AetherCloudBuildInfo
:ivar root:
:vartype root: str
"""
_attribute_map = {
'build_info': {'key': 'buildInfo', 'type': 'AetherCloudBuildInfo'},
'root': {'key': 'root', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword build_info:
:paramtype build_info: ~flow.models.AetherCloudBuildInfo
:keyword root:
:paramtype root: str
"""
super(AetherCloudBuildDropPathInfo, self).__init__(**kwargs)
self.build_info = kwargs.get('build_info', None)
self.root = kwargs.get('root', None)
class AetherCloudBuildInfo(msrest.serialization.Model):
"""AetherCloudBuildInfo.
:ivar queue_info:
:vartype queue_info: ~flow.models.AetherCloudBuildQueueInfo
:ivar build_id:
:vartype build_id: str
:ivar drop_url:
:vartype drop_url: str
"""
_attribute_map = {
'queue_info': {'key': 'queueInfo', 'type': 'AetherCloudBuildQueueInfo'},
'build_id': {'key': 'buildId', 'type': 'str'},
'drop_url': {'key': 'dropUrl', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword queue_info:
:paramtype queue_info: ~flow.models.AetherCloudBuildQueueInfo
:keyword build_id:
:paramtype build_id: str
:keyword drop_url:
:paramtype drop_url: str
"""
super(AetherCloudBuildInfo, self).__init__(**kwargs)
self.queue_info = kwargs.get('queue_info', None)
self.build_id = kwargs.get('build_id', None)
self.drop_url = kwargs.get('drop_url', None)
class AetherCloudBuildQueueInfo(msrest.serialization.Model):
"""AetherCloudBuildQueueInfo.
:ivar build_queue:
:vartype build_queue: str
:ivar build_role:
:vartype build_role: str
"""
_attribute_map = {
'build_queue': {'key': 'buildQueue', 'type': 'str'},
'build_role': {'key': 'buildRole', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword build_queue:
:paramtype build_queue: str
:keyword build_role:
:paramtype build_role: str
"""
super(AetherCloudBuildQueueInfo, self).__init__(**kwargs)
self.build_queue = kwargs.get('build_queue', None)
self.build_role = kwargs.get('build_role', None)
class AetherCloudPrioritySetting(msrest.serialization.Model):
"""AetherCloudPrioritySetting.
:ivar scope_priority:
:vartype scope_priority: ~flow.models.AetherPriorityConfiguration
:ivar aml_compute_priority:
:vartype aml_compute_priority: ~flow.models.AetherPriorityConfiguration
:ivar itp_priority:
:vartype itp_priority: ~flow.models.AetherPriorityConfiguration
:ivar singularity_priority:
:vartype singularity_priority: ~flow.models.AetherPriorityConfiguration
"""
_attribute_map = {
'scope_priority': {'key': 'scopePriority', 'type': 'AetherPriorityConfiguration'},
'aml_compute_priority': {'key': 'AmlComputePriority', 'type': 'AetherPriorityConfiguration'},
'itp_priority': {'key': 'ItpPriority', 'type': 'AetherPriorityConfiguration'},
'singularity_priority': {'key': 'SingularityPriority', 'type': 'AetherPriorityConfiguration'},
}
def __init__(
self,
**kwargs
):
"""
:keyword scope_priority:
:paramtype scope_priority: ~flow.models.AetherPriorityConfiguration
:keyword aml_compute_priority:
:paramtype aml_compute_priority: ~flow.models.AetherPriorityConfiguration
:keyword itp_priority:
:paramtype itp_priority: ~flow.models.AetherPriorityConfiguration
:keyword singularity_priority:
:paramtype singularity_priority: ~flow.models.AetherPriorityConfiguration
"""
super(AetherCloudPrioritySetting, self).__init__(**kwargs)
self.scope_priority = kwargs.get('scope_priority', None)
self.aml_compute_priority = kwargs.get('aml_compute_priority', None)
self.itp_priority = kwargs.get('itp_priority', None)
self.singularity_priority = kwargs.get('singularity_priority', None)
class AetherCloudSettings(msrest.serialization.Model):
"""AetherCloudSettings.
:ivar linked_settings:
:vartype linked_settings: list[~flow.models.AetherParameterAssignment]
:ivar priority_config:
:vartype priority_config: ~flow.models.AetherPriorityConfiguration
:ivar hdi_run_config:
:vartype hdi_run_config: ~flow.models.AetherHdiRunConfiguration
:ivar sub_graph_config:
:vartype sub_graph_config: ~flow.models.AetherSubGraphConfiguration
:ivar auto_ml_component_config:
:vartype auto_ml_component_config: ~flow.models.AetherAutoMLComponentConfiguration
:ivar ap_cloud_config:
:vartype ap_cloud_config: ~flow.models.AetherAPCloudConfiguration
:ivar scope_cloud_config:
:vartype scope_cloud_config: ~flow.models.AetherScopeCloudConfiguration
:ivar es_cloud_config:
:vartype es_cloud_config: ~flow.models.AetherEsCloudConfiguration
:ivar data_transfer_cloud_config:
:vartype data_transfer_cloud_config: ~flow.models.AetherDataTransferCloudConfiguration
:ivar aml_spark_cloud_setting:
:vartype aml_spark_cloud_setting: ~flow.models.AetherAmlSparkCloudSetting
:ivar data_transfer_v2_cloud_setting:
:vartype data_transfer_v2_cloud_setting: ~flow.models.AetherDataTransferV2CloudSetting
"""
_attribute_map = {
'linked_settings': {'key': 'linkedSettings', 'type': '[AetherParameterAssignment]'},
'priority_config': {'key': 'priorityConfig', 'type': 'AetherPriorityConfiguration'},
'hdi_run_config': {'key': 'hdiRunConfig', 'type': 'AetherHdiRunConfiguration'},
'sub_graph_config': {'key': 'subGraphConfig', 'type': 'AetherSubGraphConfiguration'},
'auto_ml_component_config': {'key': 'autoMLComponentConfig', 'type': 'AetherAutoMLComponentConfiguration'},
'ap_cloud_config': {'key': 'apCloudConfig', 'type': 'AetherAPCloudConfiguration'},
'scope_cloud_config': {'key': 'scopeCloudConfig', 'type': 'AetherScopeCloudConfiguration'},
'es_cloud_config': {'key': 'esCloudConfig', 'type': 'AetherEsCloudConfiguration'},
'data_transfer_cloud_config': {'key': 'dataTransferCloudConfig', 'type': 'AetherDataTransferCloudConfiguration'},
'aml_spark_cloud_setting': {'key': 'amlSparkCloudSetting', 'type': 'AetherAmlSparkCloudSetting'},
'data_transfer_v2_cloud_setting': {'key': 'dataTransferV2CloudSetting', 'type': 'AetherDataTransferV2CloudSetting'},
}
def __init__(
self,
**kwargs
):
"""
:keyword linked_settings:
:paramtype linked_settings: list[~flow.models.AetherParameterAssignment]
:keyword priority_config:
:paramtype priority_config: ~flow.models.AetherPriorityConfiguration
:keyword hdi_run_config:
:paramtype hdi_run_config: ~flow.models.AetherHdiRunConfiguration
:keyword sub_graph_config:
:paramtype sub_graph_config: ~flow.models.AetherSubGraphConfiguration
:keyword auto_ml_component_config:
:paramtype auto_ml_component_config: ~flow.models.AetherAutoMLComponentConfiguration
:keyword ap_cloud_config:
:paramtype ap_cloud_config: ~flow.models.AetherAPCloudConfiguration
:keyword scope_cloud_config:
:paramtype scope_cloud_config: ~flow.models.AetherScopeCloudConfiguration
:keyword es_cloud_config:
:paramtype es_cloud_config: ~flow.models.AetherEsCloudConfiguration
:keyword data_transfer_cloud_config:
:paramtype data_transfer_cloud_config: ~flow.models.AetherDataTransferCloudConfiguration
:keyword aml_spark_cloud_setting:
:paramtype aml_spark_cloud_setting: ~flow.models.AetherAmlSparkCloudSetting
:keyword data_transfer_v2_cloud_setting:
:paramtype data_transfer_v2_cloud_setting: ~flow.models.AetherDataTransferV2CloudSetting
"""
super(AetherCloudSettings, self).__init__(**kwargs)
self.linked_settings = kwargs.get('linked_settings', None)
self.priority_config = kwargs.get('priority_config', None)
self.hdi_run_config = kwargs.get('hdi_run_config', None)
self.sub_graph_config = kwargs.get('sub_graph_config', None)
self.auto_ml_component_config = kwargs.get('auto_ml_component_config', None)
self.ap_cloud_config = kwargs.get('ap_cloud_config', None)
self.scope_cloud_config = kwargs.get('scope_cloud_config', None)
self.es_cloud_config = kwargs.get('es_cloud_config', None)
self.data_transfer_cloud_config = kwargs.get('data_transfer_cloud_config', None)
self.aml_spark_cloud_setting = kwargs.get('aml_spark_cloud_setting', None)
self.data_transfer_v2_cloud_setting = kwargs.get('data_transfer_v2_cloud_setting', None)
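# Usage sketch (editor's addition): AetherCloudSettings aggregates the
# per-runtime configuration blocks; only the fields relevant to a given
# module need to be populated. The compute name is hypothetical.
def _example_cloud_settings():
    spark = AetherAmlSparkCloudSetting(compute='my-spark-compute')
    return AetherCloudSettings(aml_spark_cloud_setting=spark)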
class AetherColumnTransformer(msrest.serialization.Model):
"""AetherColumnTransformer.
:ivar fields:
:vartype fields: list[str]
:ivar parameters: Anything.
:vartype parameters: any
"""
_attribute_map = {
'fields': {'key': 'fields', 'type': '[str]'},
'parameters': {'key': 'parameters', 'type': 'object'},
}
def __init__(
self,
**kwargs
):
"""
:keyword fields:
:paramtype fields: list[str]
:keyword parameters: Anything.
:paramtype parameters: any
"""
super(AetherColumnTransformer, self).__init__(**kwargs)
self.fields = kwargs.get('fields', None)
self.parameters = kwargs.get('parameters', None)
class AetherComputeConfiguration(msrest.serialization.Model):
"""AetherComputeConfiguration.
:ivar target:
:vartype target: str
:ivar instance_count:
:vartype instance_count: int
:ivar is_local:
:vartype is_local: bool
:ivar location:
:vartype location: str
:ivar is_clusterless:
:vartype is_clusterless: bool
:ivar instance_type:
:vartype instance_type: str
:ivar properties: Dictionary of :code:`<any>`.
:vartype properties: dict[str, any]
:ivar is_preemptable:
:vartype is_preemptable: bool
"""
_attribute_map = {
'target': {'key': 'target', 'type': 'str'},
'instance_count': {'key': 'instanceCount', 'type': 'int'},
'is_local': {'key': 'isLocal', 'type': 'bool'},
'location': {'key': 'location', 'type': 'str'},
'is_clusterless': {'key': 'isClusterless', 'type': 'bool'},
'instance_type': {'key': 'instanceType', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{object}'},
'is_preemptable': {'key': 'isPreemptable', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword target:
:paramtype target: str
:keyword instance_count:
:paramtype instance_count: int
:keyword is_local:
:paramtype is_local: bool
:keyword location:
:paramtype location: str
:keyword is_clusterless:
:paramtype is_clusterless: bool
:keyword instance_type:
:paramtype instance_type: str
:keyword properties: Dictionary of :code:`<any>`.
:paramtype properties: dict[str, any]
:keyword is_preemptable:
:paramtype is_preemptable: bool
"""
super(AetherComputeConfiguration, self).__init__(**kwargs)
self.target = kwargs.get('target', None)
self.instance_count = kwargs.get('instance_count', None)
self.is_local = kwargs.get('is_local', None)
self.location = kwargs.get('location', None)
self.is_clusterless = kwargs.get('is_clusterless', None)
self.instance_type = kwargs.get('instance_type', None)
self.properties = kwargs.get('properties', None)
self.is_preemptable = kwargs.get('is_preemptable', None)
class AetherComputeSetting(msrest.serialization.Model):
"""AetherComputeSetting.
:ivar name:
:vartype name: str
:ivar compute_type: Possible values include: "BatchAi", "MLC", "HdiCluster", "RemoteDocker",
"Databricks", "Aisc".
:vartype compute_type: str or ~flow.models.AetherComputeType
:ivar batch_ai_compute_info:
:vartype batch_ai_compute_info: ~flow.models.AetherBatchAiComputeInfo
:ivar remote_docker_compute_info:
:vartype remote_docker_compute_info: ~flow.models.AetherRemoteDockerComputeInfo
:ivar hdi_cluster_compute_info:
:vartype hdi_cluster_compute_info: ~flow.models.AetherHdiClusterComputeInfo
:ivar mlc_compute_info:
:vartype mlc_compute_info: ~flow.models.AetherMlcComputeInfo
:ivar databricks_compute_info:
:vartype databricks_compute_info: ~flow.models.AetherDatabricksComputeInfo
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'compute_type': {'key': 'computeType', 'type': 'str'},
'batch_ai_compute_info': {'key': 'batchAiComputeInfo', 'type': 'AetherBatchAiComputeInfo'},
'remote_docker_compute_info': {'key': 'remoteDockerComputeInfo', 'type': 'AetherRemoteDockerComputeInfo'},
'hdi_cluster_compute_info': {'key': 'hdiClusterComputeInfo', 'type': 'AetherHdiClusterComputeInfo'},
'mlc_compute_info': {'key': 'mlcComputeInfo', 'type': 'AetherMlcComputeInfo'},
'databricks_compute_info': {'key': 'databricksComputeInfo', 'type': 'AetherDatabricksComputeInfo'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword compute_type: Possible values include: "BatchAi", "MLC", "HdiCluster", "RemoteDocker",
"Databricks", "Aisc".
:paramtype compute_type: str or ~flow.models.AetherComputeType
:keyword batch_ai_compute_info:
:paramtype batch_ai_compute_info: ~flow.models.AetherBatchAiComputeInfo
:keyword remote_docker_compute_info:
:paramtype remote_docker_compute_info: ~flow.models.AetherRemoteDockerComputeInfo
:keyword hdi_cluster_compute_info:
:paramtype hdi_cluster_compute_info: ~flow.models.AetherHdiClusterComputeInfo
:keyword mlc_compute_info:
:paramtype mlc_compute_info: ~flow.models.AetherMlcComputeInfo
:keyword databricks_compute_info:
:paramtype databricks_compute_info: ~flow.models.AetherDatabricksComputeInfo
"""
super(AetherComputeSetting, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.compute_type = kwargs.get('compute_type', None)
self.batch_ai_compute_info = kwargs.get('batch_ai_compute_info', None)
self.remote_docker_compute_info = kwargs.get('remote_docker_compute_info', None)
self.hdi_cluster_compute_info = kwargs.get('hdi_cluster_compute_info', None)
self.mlc_compute_info = kwargs.get('mlc_compute_info', None)
self.databricks_compute_info = kwargs.get('databricks_compute_info', None)
class AetherControlInput(msrest.serialization.Model):
"""AetherControlInput.
:ivar name:
:vartype name: str
:ivar default_value: Possible values include: "None", "False", "True", "Skipped".
:vartype default_value: str or ~flow.models.AetherControlInputValue
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'default_value': {'key': 'defaultValue', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword default_value: Possible values include: "None", "False", "True", "Skipped".
:paramtype default_value: str or ~flow.models.AetherControlInputValue
"""
super(AetherControlInput, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.default_value = kwargs.get('default_value', None)
class AetherControlOutput(msrest.serialization.Model):
"""AetherControlOutput.
:ivar name:
:vartype name: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
"""
super(AetherControlOutput, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
class AetherCopyDataTask(msrest.serialization.Model):
"""AetherCopyDataTask.
:ivar data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
:vartype data_copy_mode: str or ~flow.models.AetherDataCopyMode
"""
_attribute_map = {
'data_copy_mode': {'key': 'DataCopyMode', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
:paramtype data_copy_mode: str or ~flow.models.AetherDataCopyMode
"""
super(AetherCopyDataTask, self).__init__(**kwargs)
self.data_copy_mode = kwargs.get('data_copy_mode', None)
class AetherCosmosReference(msrest.serialization.Model):
"""AetherCosmosReference.
:ivar cluster:
:vartype cluster: str
:ivar vc:
:vartype vc: str
:ivar relative_path:
:vartype relative_path: str
"""
_attribute_map = {
'cluster': {'key': 'cluster', 'type': 'str'},
'vc': {'key': 'vc', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword cluster:
:paramtype cluster: str
:keyword vc:
:paramtype vc: str
:keyword relative_path:
:paramtype relative_path: str
"""
super(AetherCosmosReference, self).__init__(**kwargs)
self.cluster = kwargs.get('cluster', None)
self.vc = kwargs.get('vc', None)
self.relative_path = kwargs.get('relative_path', None)
class AetherCreatedBy(msrest.serialization.Model):
"""AetherCreatedBy.
:ivar user_object_id:
:vartype user_object_id: str
:ivar user_tenant_id:
:vartype user_tenant_id: str
:ivar user_name:
:vartype user_name: str
:ivar puid:
:vartype puid: str
:ivar iss:
:vartype iss: str
:ivar idp:
:vartype idp: str
:ivar altsec_id:
:vartype altsec_id: str
:ivar source_ip:
:vartype source_ip: str
:ivar skip_registry_private_link_check:
:vartype skip_registry_private_link_check: bool
"""
_attribute_map = {
'user_object_id': {'key': 'userObjectId', 'type': 'str'},
'user_tenant_id': {'key': 'userTenantId', 'type': 'str'},
'user_name': {'key': 'userName', 'type': 'str'},
'puid': {'key': 'puid', 'type': 'str'},
'iss': {'key': 'iss', 'type': 'str'},
'idp': {'key': 'idp', 'type': 'str'},
'altsec_id': {'key': 'altsecId', 'type': 'str'},
'source_ip': {'key': 'sourceIp', 'type': 'str'},
'skip_registry_private_link_check': {'key': 'skipRegistryPrivateLinkCheck', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword user_object_id:
:paramtype user_object_id: str
:keyword user_tenant_id:
:paramtype user_tenant_id: str
:keyword user_name:
:paramtype user_name: str
:keyword puid:
:paramtype puid: str
:keyword iss:
:paramtype iss: str
:keyword idp:
:paramtype idp: str
:keyword altsec_id:
:paramtype altsec_id: str
:keyword source_ip:
:paramtype source_ip: str
:keyword skip_registry_private_link_check:
:paramtype skip_registry_private_link_check: bool
"""
super(AetherCreatedBy, self).__init__(**kwargs)
self.user_object_id = kwargs.get('user_object_id', None)
self.user_tenant_id = kwargs.get('user_tenant_id', None)
self.user_name = kwargs.get('user_name', None)
self.puid = kwargs.get('puid', None)
self.iss = kwargs.get('iss', None)
self.idp = kwargs.get('idp', None)
self.altsec_id = kwargs.get('altsec_id', None)
self.source_ip = kwargs.get('source_ip', None)
self.skip_registry_private_link_check = kwargs.get('skip_registry_private_link_check', None)
class AetherCustomReference(msrest.serialization.Model):
"""AetherCustomReference.
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
:ivar relative_path:
:vartype relative_path: str
"""
_attribute_map = {
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
:keyword relative_path:
:paramtype relative_path: str
"""
super(AetherCustomReference, self).__init__(**kwargs)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
self.relative_path = kwargs.get('relative_path', None)
class AetherDatabaseSink(msrest.serialization.Model):
"""AetherDatabaseSink.
:ivar connection:
:vartype connection: str
:ivar table:
:vartype table: str
"""
_attribute_map = {
'connection': {'key': 'connection', 'type': 'str'},
'table': {'key': 'table', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword connection:
:paramtype connection: str
:keyword table:
:paramtype table: str
"""
super(AetherDatabaseSink, self).__init__(**kwargs)
self.connection = kwargs.get('connection', None)
self.table = kwargs.get('table', None)
class AetherDatabaseSource(msrest.serialization.Model):
"""AetherDatabaseSource.
:ivar connection:
:vartype connection: str
:ivar query:
:vartype query: str
:ivar stored_procedure_name:
:vartype stored_procedure_name: str
:ivar stored_procedure_parameters:
:vartype stored_procedure_parameters: list[~flow.models.AetherStoredProcedureParameter]
"""
_attribute_map = {
'connection': {'key': 'connection', 'type': 'str'},
'query': {'key': 'query', 'type': 'str'},
'stored_procedure_name': {'key': 'storedProcedureName', 'type': 'str'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '[AetherStoredProcedureParameter]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword connection:
:paramtype connection: str
:keyword query:
:paramtype query: str
:keyword stored_procedure_name:
:paramtype stored_procedure_name: str
:keyword stored_procedure_parameters:
:paramtype stored_procedure_parameters: list[~flow.models.AetherStoredProcedureParameter]
"""
super(AetherDatabaseSource, self).__init__(**kwargs)
self.connection = kwargs.get('connection', None)
self.query = kwargs.get('query', None)
self.stored_procedure_name = kwargs.get('stored_procedure_name', None)
self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None)
class AetherDatabricksComputeInfo(msrest.serialization.Model):
"""AetherDatabricksComputeInfo.
:ivar existing_cluster_id:
:vartype existing_cluster_id: str
"""
_attribute_map = {
'existing_cluster_id': {'key': 'existingClusterId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword existing_cluster_id:
:paramtype existing_cluster_id: str
"""
super(AetherDatabricksComputeInfo, self).__init__(**kwargs)
self.existing_cluster_id = kwargs.get('existing_cluster_id', None)
class AetherDataLocation(msrest.serialization.Model):
"""AetherDataLocation.
:ivar storage_type: Possible values include: "Cosmos", "AzureBlob", "Artifact", "Snapshot",
"SavedAmlDataset", "Asset".
:vartype storage_type: str or ~flow.models.AetherDataLocationStorageType
:ivar storage_id:
:vartype storage_id: str
:ivar uri:
:vartype uri: str
:ivar data_store_name:
:vartype data_store_name: str
:ivar data_reference:
:vartype data_reference: ~flow.models.AetherDataReference
:ivar aml_dataset:
:vartype aml_dataset: ~flow.models.AetherAmlDataset
:ivar asset_definition:
:vartype asset_definition: ~flow.models.AetherAssetDefinition
:ivar is_compliant:
:vartype is_compliant: bool
:ivar reuse_calculation_fields:
:vartype reuse_calculation_fields: ~flow.models.AetherDataLocationReuseCalculationFields
"""
_attribute_map = {
'storage_type': {'key': 'storageType', 'type': 'str'},
'storage_id': {'key': 'storageId', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'data_reference': {'key': 'dataReference', 'type': 'AetherDataReference'},
'aml_dataset': {'key': 'amlDataset', 'type': 'AetherAmlDataset'},
'asset_definition': {'key': 'assetDefinition', 'type': 'AetherAssetDefinition'},
'is_compliant': {'key': 'isCompliant', 'type': 'bool'},
'reuse_calculation_fields': {'key': 'reuseCalculationFields', 'type': 'AetherDataLocationReuseCalculationFields'},
}
def __init__(
self,
**kwargs
):
"""
:keyword storage_type: Possible values include: "Cosmos", "AzureBlob", "Artifact", "Snapshot",
"SavedAmlDataset", "Asset".
:paramtype storage_type: str or ~flow.models.AetherDataLocationStorageType
:keyword storage_id:
:paramtype storage_id: str
:keyword uri:
:paramtype uri: str
:keyword data_store_name:
:paramtype data_store_name: str
:keyword data_reference:
:paramtype data_reference: ~flow.models.AetherDataReference
:keyword aml_dataset:
:paramtype aml_dataset: ~flow.models.AetherAmlDataset
:keyword asset_definition:
:paramtype asset_definition: ~flow.models.AetherAssetDefinition
:keyword is_compliant:
:paramtype is_compliant: bool
:keyword reuse_calculation_fields:
:paramtype reuse_calculation_fields: ~flow.models.AetherDataLocationReuseCalculationFields
"""
super(AetherDataLocation, self).__init__(**kwargs)
self.storage_type = kwargs.get('storage_type', None)
self.storage_id = kwargs.get('storage_id', None)
self.uri = kwargs.get('uri', None)
self.data_store_name = kwargs.get('data_store_name', None)
self.data_reference = kwargs.get('data_reference', None)
self.aml_dataset = kwargs.get('aml_dataset', None)
self.asset_definition = kwargs.get('asset_definition', None)
self.is_compliant = kwargs.get('is_compliant', None)
self.reuse_calculation_fields = kwargs.get('reuse_calculation_fields', None)
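# Illustrative sketch (datastore name and URI are hypothetical): storage_type
# accepts the plain strings listed in AetherDataLocationStorageType, e.g.
# "AzureBlob".
#
#   location = AetherDataLocation(
#       storage_type='AzureBlob',
#       data_store_name='workspaceblobstore',
#       uri='azureml://datastores/workspaceblobstore/paths/data/train/',
#   )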
class AetherDataLocationReuseCalculationFields(msrest.serialization.Model):
"""AetherDataLocationReuseCalculationFields.
:ivar data_store_name:
:vartype data_store_name: str
:ivar relative_path:
:vartype relative_path: str
:ivar data_experiment_id:
:vartype data_experiment_id: str
"""
_attribute_map = {
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'data_experiment_id': {'key': 'dataExperimentId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_store_name:
:paramtype data_store_name: str
:keyword relative_path:
:paramtype relative_path: str
:keyword data_experiment_id:
:paramtype data_experiment_id: str
"""
super(AetherDataLocationReuseCalculationFields, self).__init__(**kwargs)
self.data_store_name = kwargs.get('data_store_name', None)
self.relative_path = kwargs.get('relative_path', None)
self.data_experiment_id = kwargs.get('data_experiment_id', None)
class AetherDataPath(msrest.serialization.Model):
"""AetherDataPath.
:ivar data_store_name:
:vartype data_store_name: str
:ivar relative_path:
:vartype relative_path: str
:ivar sql_data_path:
:vartype sql_data_path: ~flow.models.AetherSqlDataPath
"""
_attribute_map = {
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'sql_data_path': {'key': 'sqlDataPath', 'type': 'AetherSqlDataPath'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_store_name:
:paramtype data_store_name: str
:keyword relative_path:
:paramtype relative_path: str
:keyword sql_data_path:
:paramtype sql_data_path: ~flow.models.AetherSqlDataPath
"""
super(AetherDataPath, self).__init__(**kwargs)
self.data_store_name = kwargs.get('data_store_name', None)
self.relative_path = kwargs.get('relative_path', None)
self.sql_data_path = kwargs.get('sql_data_path', None)
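# Illustrative sketch (values are hypothetical): a plain datastore-relative path.
#
#   data_path = AetherDataPath(
#       data_store_name='workspaceblobstore',
#       relative_path='datasets/iris/train.csv',
#   )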
class AetherDataReference(msrest.serialization.Model):
"""AetherDataReference.
:ivar type: Possible values include: "None", "AzureBlob", "AzureDataLake", "AzureFiles",
"Cosmos", "PhillyHdfs", "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2",
"DBFS", "AzureMySqlDatabase", "Custom", "Hdfs".
:vartype type: str or ~flow.models.AetherDataReferenceType
:ivar azure_blob_reference:
:vartype azure_blob_reference: ~flow.models.AetherAzureBlobReference
:ivar azure_data_lake_reference:
:vartype azure_data_lake_reference: ~flow.models.AetherAzureDataLakeReference
:ivar azure_files_reference:
:vartype azure_files_reference: ~flow.models.AetherAzureFilesReference
:ivar cosmos_reference:
:vartype cosmos_reference: ~flow.models.AetherCosmosReference
:ivar philly_hdfs_reference:
:vartype philly_hdfs_reference: ~flow.models.AetherPhillyHdfsReference
:ivar azure_sql_database_reference:
:vartype azure_sql_database_reference: ~flow.models.AetherAzureDatabaseReference
:ivar azure_postgres_database_reference:
:vartype azure_postgres_database_reference: ~flow.models.AetherAzureDatabaseReference
:ivar azure_data_lake_gen2_reference:
:vartype azure_data_lake_gen2_reference: ~flow.models.AetherAzureDataLakeGen2Reference
:ivar dbfs_reference:
:vartype dbfs_reference: ~flow.models.AetherDBFSReference
:ivar azure_my_sql_database_reference:
:vartype azure_my_sql_database_reference: ~flow.models.AetherAzureDatabaseReference
:ivar custom_reference:
:vartype custom_reference: ~flow.models.AetherCustomReference
:ivar hdfs_reference:
:vartype hdfs_reference: ~flow.models.AetherHdfsReference
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'azure_blob_reference': {'key': 'azureBlobReference', 'type': 'AetherAzureBlobReference'},
'azure_data_lake_reference': {'key': 'azureDataLakeReference', 'type': 'AetherAzureDataLakeReference'},
'azure_files_reference': {'key': 'azureFilesReference', 'type': 'AetherAzureFilesReference'},
'cosmos_reference': {'key': 'cosmosReference', 'type': 'AetherCosmosReference'},
'philly_hdfs_reference': {'key': 'phillyHdfsReference', 'type': 'AetherPhillyHdfsReference'},
'azure_sql_database_reference': {'key': 'azureSqlDatabaseReference', 'type': 'AetherAzureDatabaseReference'},
'azure_postgres_database_reference': {'key': 'azurePostgresDatabaseReference', 'type': 'AetherAzureDatabaseReference'},
'azure_data_lake_gen2_reference': {'key': 'azureDataLakeGen2Reference', 'type': 'AetherAzureDataLakeGen2Reference'},
'dbfs_reference': {'key': 'dbfsReference', 'type': 'AetherDBFSReference'},
'azure_my_sql_database_reference': {'key': 'azureMySqlDatabaseReference', 'type': 'AetherAzureDatabaseReference'},
'custom_reference': {'key': 'customReference', 'type': 'AetherCustomReference'},
'hdfs_reference': {'key': 'hdfsReference', 'type': 'AetherHdfsReference'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: Possible values include: "None", "AzureBlob", "AzureDataLake", "AzureFiles",
"Cosmos", "PhillyHdfs", "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2",
"DBFS", "AzureMySqlDatabase", "Custom", "Hdfs".
:paramtype type: str or ~flow.models.AetherDataReferenceType
:keyword azure_blob_reference:
:paramtype azure_blob_reference: ~flow.models.AetherAzureBlobReference
:keyword azure_data_lake_reference:
:paramtype azure_data_lake_reference: ~flow.models.AetherAzureDataLakeReference
:keyword azure_files_reference:
:paramtype azure_files_reference: ~flow.models.AetherAzureFilesReference
:keyword cosmos_reference:
:paramtype cosmos_reference: ~flow.models.AetherCosmosReference
:keyword philly_hdfs_reference:
:paramtype philly_hdfs_reference: ~flow.models.AetherPhillyHdfsReference
:keyword azure_sql_database_reference:
:paramtype azure_sql_database_reference: ~flow.models.AetherAzureDatabaseReference
:keyword azure_postgres_database_reference:
:paramtype azure_postgres_database_reference: ~flow.models.AetherAzureDatabaseReference
:keyword azure_data_lake_gen2_reference:
:paramtype azure_data_lake_gen2_reference: ~flow.models.AetherAzureDataLakeGen2Reference
:keyword dbfs_reference:
:paramtype dbfs_reference: ~flow.models.AetherDBFSReference
:keyword azure_my_sql_database_reference:
:paramtype azure_my_sql_database_reference: ~flow.models.AetherAzureDatabaseReference
:keyword custom_reference:
:paramtype custom_reference: ~flow.models.AetherCustomReference
:keyword hdfs_reference:
:paramtype hdfs_reference: ~flow.models.AetherHdfsReference
"""
super(AetherDataReference, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.azure_blob_reference = kwargs.get('azure_blob_reference', None)
self.azure_data_lake_reference = kwargs.get('azure_data_lake_reference', None)
self.azure_files_reference = kwargs.get('azure_files_reference', None)
self.cosmos_reference = kwargs.get('cosmos_reference', None)
self.philly_hdfs_reference = kwargs.get('philly_hdfs_reference', None)
self.azure_sql_database_reference = kwargs.get('azure_sql_database_reference', None)
self.azure_postgres_database_reference = kwargs.get('azure_postgres_database_reference', None)
self.azure_data_lake_gen2_reference = kwargs.get('azure_data_lake_gen2_reference', None)
self.dbfs_reference = kwargs.get('dbfs_reference', None)
self.azure_my_sql_database_reference = kwargs.get('azure_my_sql_database_reference', None)
self.custom_reference = kwargs.get('custom_reference', None)
self.hdfs_reference = kwargs.get('hdfs_reference', None)
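# Illustrative sketch (values are hypothetical): the type string acts as a
# discriminator, and the matching *_reference field carries the details, so a
# "DBFS" reference pairs with dbfs_reference (AetherDBFSReference is defined
# later in this module).
#
#   ref = AetherDataReference(
#       type='DBFS',
#       dbfs_reference=AetherDBFSReference(
#           aml_data_store_name='my_dbfs_store',
#           relative_path='mnt/data/train',
#       ),
#   )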
class AetherDataSetDefinition(msrest.serialization.Model):
"""AetherDataSetDefinition.
:ivar data_type_short_name:
:vartype data_type_short_name: str
:ivar parameter_name:
:vartype parameter_name: str
:ivar value:
:vartype value: ~flow.models.AetherDataSetDefinitionValue
"""
_attribute_map = {
'data_type_short_name': {'key': 'dataTypeShortName', 'type': 'str'},
'parameter_name': {'key': 'parameterName', 'type': 'str'},
'value': {'key': 'value', 'type': 'AetherDataSetDefinitionValue'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_type_short_name:
:paramtype data_type_short_name: str
:keyword parameter_name:
:paramtype parameter_name: str
:keyword value:
:paramtype value: ~flow.models.AetherDataSetDefinitionValue
"""
super(AetherDataSetDefinition, self).__init__(**kwargs)
self.data_type_short_name = kwargs.get('data_type_short_name', None)
self.parameter_name = kwargs.get('parameter_name', None)
self.value = kwargs.get('value', None)
class AetherDataSetDefinitionValue(msrest.serialization.Model):
"""AetherDataSetDefinitionValue.
:ivar literal_value:
:vartype literal_value: ~flow.models.AetherDataPath
:ivar data_set_reference:
:vartype data_set_reference: ~flow.models.AetherRegisteredDataSetReference
:ivar saved_data_set_reference:
:vartype saved_data_set_reference: ~flow.models.AetherSavedDataSetReference
:ivar asset_definition:
:vartype asset_definition: ~flow.models.AetherAssetDefinition
"""
_attribute_map = {
'literal_value': {'key': 'literalValue', 'type': 'AetherDataPath'},
'data_set_reference': {'key': 'dataSetReference', 'type': 'AetherRegisteredDataSetReference'},
'saved_data_set_reference': {'key': 'savedDataSetReference', 'type': 'AetherSavedDataSetReference'},
'asset_definition': {'key': 'assetDefinition', 'type': 'AetherAssetDefinition'},
}
def __init__(
self,
**kwargs
):
"""
:keyword literal_value:
:paramtype literal_value: ~flow.models.AetherDataPath
:keyword data_set_reference:
:paramtype data_set_reference: ~flow.models.AetherRegisteredDataSetReference
:keyword saved_data_set_reference:
:paramtype saved_data_set_reference: ~flow.models.AetherSavedDataSetReference
:keyword asset_definition:
:paramtype asset_definition: ~flow.models.AetherAssetDefinition
"""
super(AetherDataSetDefinitionValue, self).__init__(**kwargs)
self.literal_value = kwargs.get('literal_value', None)
self.data_set_reference = kwargs.get('data_set_reference', None)
self.saved_data_set_reference = kwargs.get('saved_data_set_reference', None)
self.asset_definition = kwargs.get('asset_definition', None)
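# Illustrative sketch (values are hypothetical): only one of the alternative
# value fields is normally populated; here, a literal datastore path.
#
#   value = AetherDataSetDefinitionValue(
#       literal_value=AetherDataPath(
#           data_store_name='workspaceblobstore',
#           relative_path='datasets/iris/',
#       ),
#   )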
class AetherDatasetOutput(msrest.serialization.Model):
"""AetherDatasetOutput.
:ivar dataset_type: Possible values include: "File", "Tabular".
:vartype dataset_type: str or ~flow.models.AetherDatasetType
:ivar dataset_registration:
:vartype dataset_registration: ~flow.models.AetherDatasetRegistration
:ivar dataset_output_options:
:vartype dataset_output_options: ~flow.models.AetherDatasetOutputOptions
"""
_attribute_map = {
'dataset_type': {'key': 'datasetType', 'type': 'str'},
'dataset_registration': {'key': 'datasetRegistration', 'type': 'AetherDatasetRegistration'},
'dataset_output_options': {'key': 'datasetOutputOptions', 'type': 'AetherDatasetOutputOptions'},
}
def __init__(
self,
**kwargs
):
"""
:keyword dataset_type: Possible values include: "File", "Tabular".
:paramtype dataset_type: str or ~flow.models.AetherDatasetType
:keyword dataset_registration:
:paramtype dataset_registration: ~flow.models.AetherDatasetRegistration
:keyword dataset_output_options:
:paramtype dataset_output_options: ~flow.models.AetherDatasetOutputOptions
"""
super(AetherDatasetOutput, self).__init__(**kwargs)
self.dataset_type = kwargs.get('dataset_type', None)
self.dataset_registration = kwargs.get('dataset_registration', None)
self.dataset_output_options = kwargs.get('dataset_output_options', None)
class AetherDatasetOutputOptions(msrest.serialization.Model):
"""AetherDatasetOutputOptions.
:ivar source_globs:
:vartype source_globs: ~flow.models.AetherGlobsOptions
:ivar path_on_datastore:
:vartype path_on_datastore: str
:ivar path_on_datastore_parameter_assignment:
:vartype path_on_datastore_parameter_assignment: ~flow.models.AetherParameterAssignment
"""
_attribute_map = {
'source_globs': {'key': 'sourceGlobs', 'type': 'AetherGlobsOptions'},
'path_on_datastore': {'key': 'pathOnDatastore', 'type': 'str'},
'path_on_datastore_parameter_assignment': {'key': 'PathOnDatastoreParameterAssignment', 'type': 'AetherParameterAssignment'},
}
def __init__(
self,
**kwargs
):
"""
:keyword source_globs:
:paramtype source_globs: ~flow.models.AetherGlobsOptions
:keyword path_on_datastore:
:paramtype path_on_datastore: str
:keyword path_on_datastore_parameter_assignment:
:paramtype path_on_datastore_parameter_assignment: ~flow.models.AetherParameterAssignment
"""
super(AetherDatasetOutputOptions, self).__init__(**kwargs)
self.source_globs = kwargs.get('source_globs', None)
self.path_on_datastore = kwargs.get('path_on_datastore', None)
self.path_on_datastore_parameter_assignment = kwargs.get('path_on_datastore_parameter_assignment', None)
class AetherDatasetRegistration(msrest.serialization.Model):
"""AetherDatasetRegistration.
:ivar name:
:vartype name: str
:ivar create_new_version:
:vartype create_new_version: bool
:ivar description:
:vartype description: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar additional_transformations:
:vartype additional_transformations: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'create_new_version': {'key': 'createNewVersion', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword create_new_version:
:paramtype create_new_version: bool
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword additional_transformations:
:paramtype additional_transformations: str
"""
super(AetherDatasetRegistration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.create_new_version = kwargs.get('create_new_version', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.additional_transformations = kwargs.get('additional_transformations', None)
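# Illustrative sketch (values are hypothetical): registering an output as a new
# version of a named dataset, with free-form string tags.
#
#   registration = AetherDatasetRegistration(
#       name='cleaned-telemetry',
#       create_new_version=True,
#       description='Telemetry after outlier removal',
#       tags={'stage': 'silver', 'owner': 'data-eng'},
#   )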
class AetherDataSettings(msrest.serialization.Model):
"""AetherDataSettings.
:ivar target_column_name:
:vartype target_column_name: str
:ivar weight_column_name:
:vartype weight_column_name: str
:ivar positive_label:
:vartype positive_label: str
:ivar validation_data:
:vartype validation_data: ~flow.models.AetherValidationDataSettings
:ivar test_data:
:vartype test_data: ~flow.models.AetherTestDataSettings
"""
_attribute_map = {
'target_column_name': {'key': 'targetColumnName', 'type': 'str'},
'weight_column_name': {'key': 'weightColumnName', 'type': 'str'},
'positive_label': {'key': 'positiveLabel', 'type': 'str'},
'validation_data': {'key': 'validationData', 'type': 'AetherValidationDataSettings'},
'test_data': {'key': 'testData', 'type': 'AetherTestDataSettings'},
}
def __init__(
self,
**kwargs
):
"""
:keyword target_column_name:
:paramtype target_column_name: str
:keyword weight_column_name:
:paramtype weight_column_name: str
:keyword positive_label:
:paramtype positive_label: str
:keyword validation_data:
:paramtype validation_data: ~flow.models.AetherValidationDataSettings
:keyword test_data:
:paramtype test_data: ~flow.models.AetherTestDataSettings
"""
super(AetherDataSettings, self).__init__(**kwargs)
self.target_column_name = kwargs.get('target_column_name', None)
self.weight_column_name = kwargs.get('weight_column_name', None)
self.positive_label = kwargs.get('positive_label', None)
self.validation_data = kwargs.get('validation_data', None)
self.test_data = kwargs.get('test_data', None)
class AetherDatastoreSetting(msrest.serialization.Model):
"""AetherDatastoreSetting.
:ivar data_store_name:
:vartype data_store_name: str
"""
_attribute_map = {
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_store_name:
:paramtype data_store_name: str
"""
super(AetherDatastoreSetting, self).__init__(**kwargs)
self.data_store_name = kwargs.get('data_store_name', None)
class AetherDataTransferCloudConfiguration(msrest.serialization.Model):
"""AetherDataTransferCloudConfiguration.
:ivar allow_overwrite:
:vartype allow_overwrite: bool
"""
_attribute_map = {
'allow_overwrite': {'key': 'AllowOverwrite', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword allow_overwrite:
:paramtype allow_overwrite: bool
"""
super(AetherDataTransferCloudConfiguration, self).__init__(**kwargs)
self.allow_overwrite = kwargs.get('allow_overwrite', None)
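# Note that this model's wire key is PascalCase ('AllowOverwrite'), as recorded
# in _attribute_map; msrest's Model.serialize() applies that mapping when
# producing the payload.
#
#   config = AetherDataTransferCloudConfiguration(allow_overwrite=True)
#   assert config.serialize() == {'AllowOverwrite': True}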
class AetherDataTransferSink(msrest.serialization.Model):
"""AetherDataTransferSink.
:ivar type: Possible values include: "DataBase", "FileSystem".
:vartype type: str or ~flow.models.AetherDataTransferStorageType
:ivar file_system:
:vartype file_system: ~flow.models.AetherFileSystem
:ivar database_sink:
:vartype database_sink: ~flow.models.AetherDatabaseSink
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'file_system': {'key': 'fileSystem', 'type': 'AetherFileSystem'},
'database_sink': {'key': 'databaseSink', 'type': 'AetherDatabaseSink'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: Possible values include: "DataBase", "FileSystem".
:paramtype type: str or ~flow.models.AetherDataTransferStorageType
:keyword file_system:
:paramtype file_system: ~flow.models.AetherFileSystem
:keyword database_sink:
:paramtype database_sink: ~flow.models.AetherDatabaseSink
"""
super(AetherDataTransferSink, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.file_system = kwargs.get('file_system', None)
self.database_sink = kwargs.get('database_sink', None)
class AetherDataTransferSource(msrest.serialization.Model):
"""AetherDataTransferSource.
:ivar type: Possible values include: "DataBase", "FileSystem".
:vartype type: str or ~flow.models.AetherDataTransferStorageType
:ivar file_system:
:vartype file_system: ~flow.models.AetherFileSystem
:ivar database_source:
:vartype database_source: ~flow.models.AetherDatabaseSource
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'file_system': {'key': 'fileSystem', 'type': 'AetherFileSystem'},
'database_source': {'key': 'databaseSource', 'type': 'AetherDatabaseSource'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: Possible values include: "DataBase", "FileSystem".
:paramtype type: str or ~flow.models.AetherDataTransferStorageType
:keyword file_system:
:paramtype file_system: ~flow.models.AetherFileSystem
:keyword database_source:
:paramtype database_source: ~flow.models.AetherDatabaseSource
"""
super(AetherDataTransferSource, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.file_system = kwargs.get('file_system', None)
self.database_source = kwargs.get('database_source', None)
class AetherDataTransferV2CloudSetting(msrest.serialization.Model):
"""AetherDataTransferV2CloudSetting.
:ivar task_type: Possible values include: "ImportData", "ExportData", "CopyData".
:vartype task_type: str or ~flow.models.AetherDataTransferTaskType
:ivar compute_name:
:vartype compute_name: str
:ivar copy_data_task:
:vartype copy_data_task: ~flow.models.AetherCopyDataTask
:ivar import_data_task:
:vartype import_data_task: ~flow.models.AetherImportDataTask
:ivar export_data_task:
:vartype export_data_task: ~flow.models.AetherExportDataTask
:ivar data_transfer_sources: This is a dictionary.
:vartype data_transfer_sources: dict[str, ~flow.models.AetherDataTransferSource]
:ivar data_transfer_sinks: This is a dictionary.
:vartype data_transfer_sinks: dict[str, ~flow.models.AetherDataTransferSink]
:ivar data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
:vartype data_copy_mode: str or ~flow.models.AetherDataCopyMode
"""
_attribute_map = {
'task_type': {'key': 'taskType', 'type': 'str'},
'compute_name': {'key': 'ComputeName', 'type': 'str'},
'copy_data_task': {'key': 'CopyDataTask', 'type': 'AetherCopyDataTask'},
'import_data_task': {'key': 'ImportDataTask', 'type': 'AetherImportDataTask'},
'export_data_task': {'key': 'ExportDataTask', 'type': 'AetherExportDataTask'},
'data_transfer_sources': {'key': 'DataTransferSources', 'type': '{AetherDataTransferSource}'},
'data_transfer_sinks': {'key': 'DataTransferSinks', 'type': '{AetherDataTransferSink}'},
'data_copy_mode': {'key': 'DataCopyMode', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword task_type: Possible values include: "ImportData", "ExportData", "CopyData".
:paramtype task_type: str or ~flow.models.AetherDataTransferTaskType
:keyword compute_name:
:paramtype compute_name: str
:keyword copy_data_task:
:paramtype copy_data_task: ~flow.models.AetherCopyDataTask
:keyword import_data_task:
:paramtype import_data_task: ~flow.models.AetherImportDataTask
:keyword export_data_task:
:paramtype export_data_task: ~flow.models.AetherExportDataTask
:keyword data_transfer_sources: This is a dictionary.
:paramtype data_transfer_sources: dict[str, ~flow.models.AetherDataTransferSource]
:keyword data_transfer_sinks: This is a dictionary.
:paramtype data_transfer_sinks: dict[str, ~flow.models.AetherDataTransferSink]
:keyword data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
:paramtype data_copy_mode: str or ~flow.models.AetherDataCopyMode
"""
super(AetherDataTransferV2CloudSetting, self).__init__(**kwargs)
self.task_type = kwargs.get('task_type', None)
self.compute_name = kwargs.get('compute_name', None)
self.copy_data_task = kwargs.get('copy_data_task', None)
self.import_data_task = kwargs.get('import_data_task', None)
self.export_data_task = kwargs.get('export_data_task', None)
self.data_transfer_sources = kwargs.get('data_transfer_sources', None)
self.data_transfer_sinks = kwargs.get('data_transfer_sinks', None)
self.data_copy_mode = kwargs.get('data_copy_mode', None)
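# Illustrative sketch (compute and connection names are hypothetical): a
# CopyData task wiring named sources and sinks through the two dictionaries,
# using AetherFileSystem from later in this module.
#
#   setting = AetherDataTransferV2CloudSetting(
#       task_type='CopyData',
#       compute_name='data-factory-compute',
#       data_transfer_sources={
#           'src': AetherDataTransferSource(
#               type='FileSystem',
#               file_system=AetherFileSystem(connection='fs_conn', path='/in'),
#           ),
#       },
#       data_transfer_sinks={
#           'dst': AetherDataTransferSink(
#               type='FileSystem',
#               file_system=AetherFileSystem(connection='fs_conn', path='/out'),
#           ),
#       },
#       data_copy_mode='MergeWithOverwrite',
#   )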
class AetherDBFSReference(msrest.serialization.Model):
"""AetherDBFSReference.
:ivar relative_path:
:vartype relative_path: str
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'relative_path': {'key': 'relativePath', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword relative_path:
:paramtype relative_path: str
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AetherDBFSReference, self).__init__(**kwargs)
self.relative_path = kwargs.get('relative_path', None)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
class AetherDockerSettingConfiguration(msrest.serialization.Model):
"""AetherDockerSettingConfiguration.
:ivar use_docker:
:vartype use_docker: bool
:ivar shared_volumes:
:vartype shared_volumes: bool
:ivar shm_size:
:vartype shm_size: str
:ivar arguments:
:vartype arguments: list[str]
"""
_attribute_map = {
'use_docker': {'key': 'useDocker', 'type': 'bool'},
'shared_volumes': {'key': 'sharedVolumes', 'type': 'bool'},
'shm_size': {'key': 'shmSize', 'type': 'str'},
'arguments': {'key': 'arguments', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword use_docker:
:paramtype use_docker: bool
:keyword shared_volumes:
:paramtype shared_volumes: bool
:keyword shm_size:
:paramtype shm_size: str
:keyword arguments:
:paramtype arguments: list[str]
"""
super(AetherDockerSettingConfiguration, self).__init__(**kwargs)
self.use_docker = kwargs.get('use_docker', None)
self.shared_volumes = kwargs.get('shared_volumes', None)
self.shm_size = kwargs.get('shm_size', None)
self.arguments = kwargs.get('arguments', None)
class AetherDoWhileControlFlowInfo(msrest.serialization.Model):
"""AetherDoWhileControlFlowInfo.
    :ivar output_port_name_to_input_port_names_mapping: Dictionary mapping each output port
     name to a list of input port names.
    :vartype output_port_name_to_input_port_names_mapping: dict[str, list[str]]
:ivar condition_output_port_name:
:vartype condition_output_port_name: str
:ivar run_settings:
:vartype run_settings: ~flow.models.AetherDoWhileControlFlowRunSettings
"""
_attribute_map = {
'output_port_name_to_input_port_names_mapping': {'key': 'outputPortNameToInputPortNamesMapping', 'type': '{[str]}'},
'condition_output_port_name': {'key': 'conditionOutputPortName', 'type': 'str'},
'run_settings': {'key': 'runSettings', 'type': 'AetherDoWhileControlFlowRunSettings'},
}
def __init__(
self,
**kwargs
):
"""
        :keyword output_port_name_to_input_port_names_mapping: Dictionary mapping each output
         port name to a list of input port names.
        :paramtype output_port_name_to_input_port_names_mapping: dict[str, list[str]]
:keyword condition_output_port_name:
:paramtype condition_output_port_name: str
:keyword run_settings:
:paramtype run_settings: ~flow.models.AetherDoWhileControlFlowRunSettings
"""
super(AetherDoWhileControlFlowInfo, self).__init__(**kwargs)
self.output_port_name_to_input_port_names_mapping = kwargs.get('output_port_name_to_input_port_names_mapping', None)
self.condition_output_port_name = kwargs.get('condition_output_port_name', None)
self.run_settings = kwargs.get('run_settings', None)
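# Illustrative sketch (port names are hypothetical, and the loop-carry reading
# of the mapping is an assumption from the field names): each mapped output
# port feeds the listed input port(s) on the next iteration.
#
#   info = AetherDoWhileControlFlowInfo(
#       output_port_name_to_input_port_names_mapping={'state_out': ['state_in']},
#       condition_output_port_name='keep_going',
#   )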
class AetherDoWhileControlFlowRunSettings(msrest.serialization.Model):
"""AetherDoWhileControlFlowRunSettings.
:ivar max_loop_iteration_count:
:vartype max_loop_iteration_count: ~flow.models.AetherParameterAssignment
"""
_attribute_map = {
'max_loop_iteration_count': {'key': 'maxLoopIterationCount', 'type': 'AetherParameterAssignment'},
}
def __init__(
self,
**kwargs
):
"""
:keyword max_loop_iteration_count:
:paramtype max_loop_iteration_count: ~flow.models.AetherParameterAssignment
"""
super(AetherDoWhileControlFlowRunSettings, self).__init__(**kwargs)
self.max_loop_iteration_count = kwargs.get('max_loop_iteration_count', None)
class AetherEntityInterfaceDocumentation(msrest.serialization.Model):
"""AetherEntityInterfaceDocumentation.
:ivar inputs_documentation: Dictionary of :code:`<string>`.
:vartype inputs_documentation: dict[str, str]
:ivar outputs_documentation: Dictionary of :code:`<string>`.
:vartype outputs_documentation: dict[str, str]
:ivar parameters_documentation: Dictionary of :code:`<string>`.
:vartype parameters_documentation: dict[str, str]
"""
_attribute_map = {
'inputs_documentation': {'key': 'inputsDocumentation', 'type': '{str}'},
'outputs_documentation': {'key': 'outputsDocumentation', 'type': '{str}'},
'parameters_documentation': {'key': 'parametersDocumentation', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword inputs_documentation: Dictionary of :code:`<string>`.
:paramtype inputs_documentation: dict[str, str]
:keyword outputs_documentation: Dictionary of :code:`<string>`.
:paramtype outputs_documentation: dict[str, str]
:keyword parameters_documentation: Dictionary of :code:`<string>`.
:paramtype parameters_documentation: dict[str, str]
"""
super(AetherEntityInterfaceDocumentation, self).__init__(**kwargs)
self.inputs_documentation = kwargs.get('inputs_documentation', None)
self.outputs_documentation = kwargs.get('outputs_documentation', None)
self.parameters_documentation = kwargs.get('parameters_documentation', None)
class AetherEntrySetting(msrest.serialization.Model):
"""AetherEntrySetting.
:ivar file:
:vartype file: str
:ivar class_name:
:vartype class_name: str
"""
_attribute_map = {
'file': {'key': 'file', 'type': 'str'},
'class_name': {'key': 'className', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword file:
:paramtype file: str
:keyword class_name:
:paramtype class_name: str
"""
super(AetherEntrySetting, self).__init__(**kwargs)
self.file = kwargs.get('file', None)
self.class_name = kwargs.get('class_name', None)
class AetherEnvironmentConfiguration(msrest.serialization.Model):
"""AetherEnvironmentConfiguration.
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
:ivar use_environment_definition:
:vartype use_environment_definition: bool
:ivar environment_definition_string:
:vartype environment_definition_string: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'use_environment_definition': {'key': 'useEnvironmentDefinition', 'type': 'bool'},
'environment_definition_string': {'key': 'environmentDefinitionString', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
:keyword use_environment_definition:
:paramtype use_environment_definition: bool
:keyword environment_definition_string:
:paramtype environment_definition_string: str
"""
super(AetherEnvironmentConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.version = kwargs.get('version', None)
self.use_environment_definition = kwargs.get('use_environment_definition', None)
self.environment_definition_string = kwargs.get('environment_definition_string', None)
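# Illustrative sketch (names and the definition payload are hypothetical):
# either reference a registered environment by name/version, or set
# use_environment_definition and pass the full definition inline as a string.
#
#   env = AetherEnvironmentConfiguration(name='my-training-env', version='12')
#
#   inline_env = AetherEnvironmentConfiguration(
#       use_environment_definition=True,
#       environment_definition_string='{"name": "inline-env"}',
#   )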
class AetherEsCloudConfiguration(msrest.serialization.Model):
"""AetherEsCloudConfiguration.
:ivar enable_output_to_file_based_on_data_type_id:
:vartype enable_output_to_file_based_on_data_type_id: bool
:ivar aml_compute_priority_internal:
:vartype aml_compute_priority_internal: ~flow.models.AetherPriorityConfiguration
:ivar itp_priority_internal:
:vartype itp_priority_internal: ~flow.models.AetherPriorityConfiguration
:ivar singularity_priority_internal:
:vartype singularity_priority_internal: ~flow.models.AetherPriorityConfiguration
:ivar environment:
:vartype environment: ~flow.models.AetherEnvironmentConfiguration
:ivar hyper_drive_configuration:
:vartype hyper_drive_configuration: ~flow.models.AetherHyperDriveConfiguration
:ivar k8_s_config:
:vartype k8_s_config: ~flow.models.AetherK8SConfiguration
:ivar resource_config:
:vartype resource_config: ~flow.models.AetherResourceConfiguration
:ivar torch_distributed_config:
:vartype torch_distributed_config: ~flow.models.AetherTorchDistributedConfiguration
:ivar target_selector_config:
:vartype target_selector_config: ~flow.models.AetherTargetSelectorConfiguration
:ivar docker_config:
:vartype docker_config: ~flow.models.AetherDockerSettingConfiguration
:ivar environment_variables: Dictionary of :code:`<string>`.
:vartype environment_variables: dict[str, str]
:ivar max_run_duration_seconds:
:vartype max_run_duration_seconds: int
:ivar identity:
:vartype identity: ~flow.models.AetherIdentitySetting
:ivar application_endpoints: Dictionary of :code:`<ApplicationEndpointConfiguration>`.
:vartype application_endpoints: dict[str, ~flow.models.ApplicationEndpointConfiguration]
:ivar run_config:
:vartype run_config: str
"""
_attribute_map = {
'enable_output_to_file_based_on_data_type_id': {'key': 'enableOutputToFileBasedOnDataTypeId', 'type': 'bool'},
'aml_compute_priority_internal': {'key': 'amlComputePriorityInternal', 'type': 'AetherPriorityConfiguration'},
'itp_priority_internal': {'key': 'itpPriorityInternal', 'type': 'AetherPriorityConfiguration'},
'singularity_priority_internal': {'key': 'singularityPriorityInternal', 'type': 'AetherPriorityConfiguration'},
'environment': {'key': 'environment', 'type': 'AetherEnvironmentConfiguration'},
'hyper_drive_configuration': {'key': 'hyperDriveConfiguration', 'type': 'AetherHyperDriveConfiguration'},
'k8_s_config': {'key': 'k8sConfig', 'type': 'AetherK8SConfiguration'},
'resource_config': {'key': 'resourceConfig', 'type': 'AetherResourceConfiguration'},
'torch_distributed_config': {'key': 'torchDistributedConfig', 'type': 'AetherTorchDistributedConfiguration'},
'target_selector_config': {'key': 'targetSelectorConfig', 'type': 'AetherTargetSelectorConfiguration'},
'docker_config': {'key': 'dockerConfig', 'type': 'AetherDockerSettingConfiguration'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'max_run_duration_seconds': {'key': 'maxRunDurationSeconds', 'type': 'int'},
'identity': {'key': 'identity', 'type': 'AetherIdentitySetting'},
'application_endpoints': {'key': 'applicationEndpoints', 'type': '{ApplicationEndpointConfiguration}'},
'run_config': {'key': 'runConfig', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword enable_output_to_file_based_on_data_type_id:
:paramtype enable_output_to_file_based_on_data_type_id: bool
:keyword aml_compute_priority_internal:
:paramtype aml_compute_priority_internal: ~flow.models.AetherPriorityConfiguration
:keyword itp_priority_internal:
:paramtype itp_priority_internal: ~flow.models.AetherPriorityConfiguration
:keyword singularity_priority_internal:
:paramtype singularity_priority_internal: ~flow.models.AetherPriorityConfiguration
:keyword environment:
:paramtype environment: ~flow.models.AetherEnvironmentConfiguration
:keyword hyper_drive_configuration:
:paramtype hyper_drive_configuration: ~flow.models.AetherHyperDriveConfiguration
:keyword k8_s_config:
:paramtype k8_s_config: ~flow.models.AetherK8SConfiguration
:keyword resource_config:
:paramtype resource_config: ~flow.models.AetherResourceConfiguration
:keyword torch_distributed_config:
:paramtype torch_distributed_config: ~flow.models.AetherTorchDistributedConfiguration
:keyword target_selector_config:
:paramtype target_selector_config: ~flow.models.AetherTargetSelectorConfiguration
:keyword docker_config:
:paramtype docker_config: ~flow.models.AetherDockerSettingConfiguration
:keyword environment_variables: Dictionary of :code:`<string>`.
:paramtype environment_variables: dict[str, str]
:keyword max_run_duration_seconds:
:paramtype max_run_duration_seconds: int
:keyword identity:
:paramtype identity: ~flow.models.AetherIdentitySetting
:keyword application_endpoints: Dictionary of :code:`<ApplicationEndpointConfiguration>`.
:paramtype application_endpoints: dict[str, ~flow.models.ApplicationEndpointConfiguration]
:keyword run_config:
:paramtype run_config: str
"""
super(AetherEsCloudConfiguration, self).__init__(**kwargs)
self.enable_output_to_file_based_on_data_type_id = kwargs.get('enable_output_to_file_based_on_data_type_id', None)
self.aml_compute_priority_internal = kwargs.get('aml_compute_priority_internal', None)
self.itp_priority_internal = kwargs.get('itp_priority_internal', None)
self.singularity_priority_internal = kwargs.get('singularity_priority_internal', None)
self.environment = kwargs.get('environment', None)
self.hyper_drive_configuration = kwargs.get('hyper_drive_configuration', None)
self.k8_s_config = kwargs.get('k8_s_config', None)
self.resource_config = kwargs.get('resource_config', None)
self.torch_distributed_config = kwargs.get('torch_distributed_config', None)
self.target_selector_config = kwargs.get('target_selector_config', None)
self.docker_config = kwargs.get('docker_config', None)
self.environment_variables = kwargs.get('environment_variables', None)
self.max_run_duration_seconds = kwargs.get('max_run_duration_seconds', None)
self.identity = kwargs.get('identity', None)
self.application_endpoints = kwargs.get('application_endpoints', None)
self.run_config = kwargs.get('run_config', None)
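# Illustrative sketch (values are hypothetical): set only the fields you need;
# everything else defaults to None.
#
#   es_config = AetherEsCloudConfiguration(
#       environment=AetherEnvironmentConfiguration(name='my-training-env', version='12'),
#       environment_variables={'LOG_LEVEL': 'DEBUG'},
#       max_run_duration_seconds=3600,
#   )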
class AetherExportDataTask(msrest.serialization.Model):
"""AetherExportDataTask.
:ivar data_transfer_sink:
:vartype data_transfer_sink: ~flow.models.AetherDataTransferSink
"""
_attribute_map = {
'data_transfer_sink': {'key': 'DataTransferSink', 'type': 'AetherDataTransferSink'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_transfer_sink:
:paramtype data_transfer_sink: ~flow.models.AetherDataTransferSink
"""
super(AetherExportDataTask, self).__init__(**kwargs)
self.data_transfer_sink = kwargs.get('data_transfer_sink', None)
class AetherFeaturizationSettings(msrest.serialization.Model):
"""AetherFeaturizationSettings.
:ivar mode: Possible values include: "Auto", "Custom", "Off".
:vartype mode: str or ~flow.models.AetherFeaturizationMode
:ivar blocked_transformers:
:vartype blocked_transformers: list[str]
:ivar column_purposes: Dictionary of :code:`<string>`.
:vartype column_purposes: dict[str, str]
:ivar drop_columns:
:vartype drop_columns: list[str]
    :ivar transformer_params: Dictionary mapping each transformer name to a list of column
     transformers.
    :vartype transformer_params: dict[str, list[~flow.models.AetherColumnTransformer]]
:ivar dataset_language:
:vartype dataset_language: str
:ivar enable_dnn_featurization:
:vartype enable_dnn_featurization: bool
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'blocked_transformers': {'key': 'blockedTransformers', 'type': '[str]'},
'column_purposes': {'key': 'columnPurposes', 'type': '{str}'},
'drop_columns': {'key': 'dropColumns', 'type': '[str]'},
'transformer_params': {'key': 'transformerParams', 'type': '{[AetherColumnTransformer]}'},
'dataset_language': {'key': 'datasetLanguage', 'type': 'str'},
'enable_dnn_featurization': {'key': 'enableDnnFeaturization', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom", "Off".
:paramtype mode: str or ~flow.models.AetherFeaturizationMode
:keyword blocked_transformers:
:paramtype blocked_transformers: list[str]
:keyword column_purposes: Dictionary of :code:`<string>`.
:paramtype column_purposes: dict[str, str]
:keyword drop_columns:
:paramtype drop_columns: list[str]
        :keyword transformer_params: Dictionary mapping each transformer name to a list of
         column transformers.
        :paramtype transformer_params: dict[str, list[~flow.models.AetherColumnTransformer]]
:keyword dataset_language:
:paramtype dataset_language: str
:keyword enable_dnn_featurization:
:paramtype enable_dnn_featurization: bool
"""
super(AetherFeaturizationSettings, self).__init__(**kwargs)
self.mode = kwargs.get('mode', None)
self.blocked_transformers = kwargs.get('blocked_transformers', None)
self.column_purposes = kwargs.get('column_purposes', None)
self.drop_columns = kwargs.get('drop_columns', None)
self.transformer_params = kwargs.get('transformer_params', None)
self.dataset_language = kwargs.get('dataset_language', None)
self.enable_dnn_featurization = kwargs.get('enable_dnn_featurization', None)
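# Illustrative sketch (column and transformer names are hypothetical, and
# transformer names as dictionary keys is an assumption): custom featurization
# with per-transformer parameters.
#
#   featurization = AetherFeaturizationSettings(
#       mode='Custom',
#       blocked_transformers=['WordEmbedding'],
#       column_purposes={'age': 'Numeric'},
#       drop_columns=['row_id'],
#       transformer_params={'Imputer': []},  # list of AetherColumnTransformer
#   )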
class AetherFileSystem(msrest.serialization.Model):
"""AetherFileSystem.
:ivar connection:
:vartype connection: str
:ivar path:
:vartype path: str
"""
_attribute_map = {
'connection': {'key': 'connection', 'type': 'str'},
'path': {'key': 'path', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword connection:
:paramtype connection: str
:keyword path:
:paramtype path: str
"""
super(AetherFileSystem, self).__init__(**kwargs)
self.connection = kwargs.get('connection', None)
self.path = kwargs.get('path', None)
class AetherForecastHorizon(msrest.serialization.Model):
"""AetherForecastHorizon.
:ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.AetherForecastHorizonMode
:ivar value:
:vartype value: int
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'value': {'key': 'value', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom".
:paramtype mode: str or ~flow.models.AetherForecastHorizonMode
:keyword value:
:paramtype value: int
"""
super(AetherForecastHorizon, self).__init__(**kwargs)
self.mode = kwargs.get('mode', None)
self.value = kwargs.get('value', None)
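# Illustrative sketch: with mode='Custom' the integer value sets the horizon;
# with mode='Auto' it is presumably ignored (Auto/Custom semantics assumed
# from the enum names).
#
#   horizon = AetherForecastHorizon(mode='Custom', value=14)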
class AetherForecastingSettings(msrest.serialization.Model):
"""AetherForecastingSettings.
:ivar country_or_region_for_holidays:
:vartype country_or_region_for_holidays: str
:ivar time_column_name:
:vartype time_column_name: str
:ivar target_lags:
:vartype target_lags: ~flow.models.AetherTargetLags
:ivar target_rolling_window_size:
:vartype target_rolling_window_size: ~flow.models.AetherTargetRollingWindowSize
:ivar forecast_horizon:
:vartype forecast_horizon: ~flow.models.AetherForecastHorizon
:ivar time_series_id_column_names:
:vartype time_series_id_column_names: list[str]
:ivar frequency:
:vartype frequency: str
:ivar feature_lags:
:vartype feature_lags: str
:ivar seasonality:
:vartype seasonality: ~flow.models.AetherSeasonality
:ivar short_series_handling_config: Possible values include: "Auto", "Pad", "Drop".
:vartype short_series_handling_config: str or
~flow.models.AetherShortSeriesHandlingConfiguration
:ivar use_stl: Possible values include: "Season", "SeasonTrend".
:vartype use_stl: str or ~flow.models.AetherUseStl
:ivar target_aggregate_function: Possible values include: "Sum", "Max", "Min", "Mean".
:vartype target_aggregate_function: str or ~flow.models.AetherTargetAggregationFunction
:ivar cv_step_size:
:vartype cv_step_size: int
:ivar features_unknown_at_forecast_time:
:vartype features_unknown_at_forecast_time: list[str]
"""
_attribute_map = {
'country_or_region_for_holidays': {'key': 'countryOrRegionForHolidays', 'type': 'str'},
'time_column_name': {'key': 'timeColumnName', 'type': 'str'},
'target_lags': {'key': 'targetLags', 'type': 'AetherTargetLags'},
'target_rolling_window_size': {'key': 'targetRollingWindowSize', 'type': 'AetherTargetRollingWindowSize'},
'forecast_horizon': {'key': 'forecastHorizon', 'type': 'AetherForecastHorizon'},
'time_series_id_column_names': {'key': 'timeSeriesIdColumnNames', 'type': '[str]'},
'frequency': {'key': 'frequency', 'type': 'str'},
'feature_lags': {'key': 'featureLags', 'type': 'str'},
'seasonality': {'key': 'seasonality', 'type': 'AetherSeasonality'},
'short_series_handling_config': {'key': 'shortSeriesHandlingConfig', 'type': 'str'},
'use_stl': {'key': 'useStl', 'type': 'str'},
'target_aggregate_function': {'key': 'targetAggregateFunction', 'type': 'str'},
'cv_step_size': {'key': 'cvStepSize', 'type': 'int'},
'features_unknown_at_forecast_time': {'key': 'featuresUnknownAtForecastTime', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword country_or_region_for_holidays:
:paramtype country_or_region_for_holidays: str
:keyword time_column_name:
:paramtype time_column_name: str
:keyword target_lags:
:paramtype target_lags: ~flow.models.AetherTargetLags
:keyword target_rolling_window_size:
:paramtype target_rolling_window_size: ~flow.models.AetherTargetRollingWindowSize
:keyword forecast_horizon:
:paramtype forecast_horizon: ~flow.models.AetherForecastHorizon
:keyword time_series_id_column_names:
:paramtype time_series_id_column_names: list[str]
:keyword frequency:
:paramtype frequency: str
:keyword feature_lags:
:paramtype feature_lags: str
:keyword seasonality:
:paramtype seasonality: ~flow.models.AetherSeasonality
:keyword short_series_handling_config: Possible values include: "Auto", "Pad", "Drop".
:paramtype short_series_handling_config: str or
~flow.models.AetherShortSeriesHandlingConfiguration
:keyword use_stl: Possible values include: "Season", "SeasonTrend".
:paramtype use_stl: str or ~flow.models.AetherUseStl
:keyword target_aggregate_function: Possible values include: "Sum", "Max", "Min", "Mean".
:paramtype target_aggregate_function: str or ~flow.models.AetherTargetAggregationFunction
:keyword cv_step_size:
:paramtype cv_step_size: int
:keyword features_unknown_at_forecast_time:
:paramtype features_unknown_at_forecast_time: list[str]
"""
super(AetherForecastingSettings, self).__init__(**kwargs)
self.country_or_region_for_holidays = kwargs.get('country_or_region_for_holidays', None)
self.time_column_name = kwargs.get('time_column_name', None)
self.target_lags = kwargs.get('target_lags', None)
self.target_rolling_window_size = kwargs.get('target_rolling_window_size', None)
self.forecast_horizon = kwargs.get('forecast_horizon', None)
self.time_series_id_column_names = kwargs.get('time_series_id_column_names', None)
self.frequency = kwargs.get('frequency', None)
self.feature_lags = kwargs.get('feature_lags', None)
self.seasonality = kwargs.get('seasonality', None)
self.short_series_handling_config = kwargs.get('short_series_handling_config', None)
self.use_stl = kwargs.get('use_stl', None)
self.target_aggregate_function = kwargs.get('target_aggregate_function', None)
self.cv_step_size = kwargs.get('cv_step_size', None)
self.features_unknown_at_forecast_time = kwargs.get('features_unknown_at_forecast_time', None)
class AetherGeneralSettings(msrest.serialization.Model):
"""AetherGeneralSettings.
:ivar primary_metric: Possible values include: "AUCWeighted", "Accuracy", "NormMacroRecall",
"AveragePrecisionScoreWeighted", "PrecisionScoreWeighted", "SpearmanCorrelation",
"NormalizedRootMeanSquaredError", "R2Score", "NormalizedMeanAbsoluteError",
"NormalizedRootMeanSquaredLogError", "MeanAveragePrecision", "Iou".
:vartype primary_metric: str or ~flow.models.AetherPrimaryMetrics
:ivar task_type: Possible values include: "Classification", "Regression", "Forecasting",
"ImageClassification", "ImageClassificationMultilabel", "ImageObjectDetection",
"ImageInstanceSegmentation", "TextClassification", "TextMultiLabeling", "TextNER",
"TextClassificationMultilabel".
:vartype task_type: str or ~flow.models.AetherTaskType
:ivar log_verbosity: Possible values include: "NotSet", "Debug", "Info", "Warning", "Error",
"Critical".
:vartype log_verbosity: str or ~flow.models.AetherLogVerbosity
"""
_attribute_map = {
'primary_metric': {'key': 'primaryMetric', 'type': 'str'},
'task_type': {'key': 'taskType', 'type': 'str'},
'log_verbosity': {'key': 'logVerbosity', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword primary_metric: Possible values include: "AUCWeighted", "Accuracy", "NormMacroRecall",
"AveragePrecisionScoreWeighted", "PrecisionScoreWeighted", "SpearmanCorrelation",
"NormalizedRootMeanSquaredError", "R2Score", "NormalizedMeanAbsoluteError",
"NormalizedRootMeanSquaredLogError", "MeanAveragePrecision", "Iou".
:paramtype primary_metric: str or ~flow.models.AetherPrimaryMetrics
:keyword task_type: Possible values include: "Classification", "Regression", "Forecasting",
"ImageClassification", "ImageClassificationMultilabel", "ImageObjectDetection",
"ImageInstanceSegmentation", "TextClassification", "TextMultiLabeling", "TextNER",
"TextClassificationMultilabel".
:paramtype task_type: str or ~flow.models.AetherTaskType
:keyword log_verbosity: Possible values include: "NotSet", "Debug", "Info", "Warning", "Error",
"Critical".
:paramtype log_verbosity: str or ~flow.models.AetherLogVerbosity
"""
super(AetherGeneralSettings, self).__init__(**kwargs)
self.primary_metric = kwargs.get('primary_metric', None)
self.task_type = kwargs.get('task_type', None)
self.log_verbosity = kwargs.get('log_verbosity', None)
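# Illustrative sketch: the enum-typed fields accept the plain string values
# listed in their docstrings.
#
#   general = AetherGeneralSettings(
#       primary_metric='Accuracy',
#       task_type='Classification',
#       log_verbosity='Info',
#   )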
class AetherGlobsOptions(msrest.serialization.Model):
"""AetherGlobsOptions.
:ivar glob_patterns:
:vartype glob_patterns: list[str]
"""
_attribute_map = {
'glob_patterns': {'key': 'globPatterns', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword glob_patterns:
:paramtype glob_patterns: list[str]
"""
super(AetherGlobsOptions, self).__init__(**kwargs)
self.glob_patterns = kwargs.get('glob_patterns', None)
class AetherGraphControlNode(msrest.serialization.Model):
"""AetherGraphControlNode.
:ivar id:
:vartype id: str
:ivar control_type: The only acceptable values to pass in are None and "IfElse". The default
value is None.
:vartype control_type: str
:ivar control_parameter:
:vartype control_parameter: ~flow.models.AetherParameterAssignment
:ivar run_attribution:
:vartype run_attribution: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'control_type': {'key': 'controlType', 'type': 'str'},
'control_parameter': {'key': 'controlParameter', 'type': 'AetherParameterAssignment'},
'run_attribution': {'key': 'runAttribution', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword control_type: The only acceptable values to pass in are None and "IfElse". The
default value is None.
:paramtype control_type: str
:keyword control_parameter:
:paramtype control_parameter: ~flow.models.AetherParameterAssignment
:keyword run_attribution:
:paramtype run_attribution: str
"""
super(AetherGraphControlNode, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.control_type = kwargs.get('control_type', None)
self.control_parameter = kwargs.get('control_parameter', None)
self.run_attribution = kwargs.get('run_attribution', None)
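# Illustrative sketch (the id is hypothetical): per the docstring above,
# control_type is either None or the string "IfElse".
#
#   control_node = AetherGraphControlNode(id='control-0', control_type='IfElse')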
class AetherGraphControlReferenceNode(msrest.serialization.Model):
"""AetherGraphControlReferenceNode.
:ivar id:
:vartype id: str
:ivar name:
:vartype name: str
:ivar comment:
:vartype comment: str
:ivar control_flow_type: Possible values include: "None", "DoWhile", "ParallelFor".
:vartype control_flow_type: str or ~flow.models.AetherControlFlowType
:ivar reference_node_id:
:vartype reference_node_id: str
:ivar do_while_control_flow_info:
:vartype do_while_control_flow_info: ~flow.models.AetherDoWhileControlFlowInfo
:ivar parallel_for_control_flow_info:
:vartype parallel_for_control_flow_info: ~flow.models.AetherParallelForControlFlowInfo
:ivar run_attribution:
:vartype run_attribution: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'comment': {'key': 'comment', 'type': 'str'},
'control_flow_type': {'key': 'controlFlowType', 'type': 'str'},
'reference_node_id': {'key': 'referenceNodeId', 'type': 'str'},
'do_while_control_flow_info': {'key': 'doWhileControlFlowInfo', 'type': 'AetherDoWhileControlFlowInfo'},
'parallel_for_control_flow_info': {'key': 'parallelForControlFlowInfo', 'type': 'AetherParallelForControlFlowInfo'},
'run_attribution': {'key': 'runAttribution', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword name:
:paramtype name: str
:keyword comment:
:paramtype comment: str
:keyword control_flow_type: Possible values include: "None", "DoWhile", "ParallelFor".
:paramtype control_flow_type: str or ~flow.models.AetherControlFlowType
:keyword reference_node_id:
:paramtype reference_node_id: str
:keyword do_while_control_flow_info:
:paramtype do_while_control_flow_info: ~flow.models.AetherDoWhileControlFlowInfo
:keyword parallel_for_control_flow_info:
:paramtype parallel_for_control_flow_info: ~flow.models.AetherParallelForControlFlowInfo
:keyword run_attribution:
:paramtype run_attribution: str
"""
super(AetherGraphControlReferenceNode, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.name = kwargs.get('name', None)
self.comment = kwargs.get('comment', None)
self.control_flow_type = kwargs.get('control_flow_type', None)
self.reference_node_id = kwargs.get('reference_node_id', None)
self.do_while_control_flow_info = kwargs.get('do_while_control_flow_info', None)
self.parallel_for_control_flow_info = kwargs.get('parallel_for_control_flow_info', None)
self.run_attribution = kwargs.get('run_attribution', None)
class AetherGraphDatasetNode(msrest.serialization.Model):
"""AetherGraphDatasetNode.
:ivar id:
:vartype id: str
:ivar dataset_id:
:vartype dataset_id: str
:ivar data_path_parameter_name:
:vartype data_path_parameter_name: str
:ivar data_set_definition:
:vartype data_set_definition: ~flow.models.AetherDataSetDefinition
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'dataset_id': {'key': 'datasetId', 'type': 'str'},
'data_path_parameter_name': {'key': 'dataPathParameterName', 'type': 'str'},
'data_set_definition': {'key': 'dataSetDefinition', 'type': 'AetherDataSetDefinition'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword dataset_id:
:paramtype dataset_id: str
:keyword data_path_parameter_name:
:paramtype data_path_parameter_name: str
:keyword data_set_definition:
:paramtype data_set_definition: ~flow.models.AetherDataSetDefinition
"""
super(AetherGraphDatasetNode, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.dataset_id = kwargs.get('dataset_id', None)
self.data_path_parameter_name = kwargs.get('data_path_parameter_name', None)
self.data_set_definition = kwargs.get('data_set_definition', None)
class AetherGraphEdge(msrest.serialization.Model):
"""AetherGraphEdge.
:ivar source_output_port:
:vartype source_output_port: ~flow.models.AetherPortInfo
:ivar destination_input_port:
:vartype destination_input_port: ~flow.models.AetherPortInfo
"""
_attribute_map = {
'source_output_port': {'key': 'sourceOutputPort', 'type': 'AetherPortInfo'},
'destination_input_port': {'key': 'destinationInputPort', 'type': 'AetherPortInfo'},
}
def __init__(
self,
**kwargs
):
"""
:keyword source_output_port:
:paramtype source_output_port: ~flow.models.AetherPortInfo
:keyword destination_input_port:
:paramtype destination_input_port: ~flow.models.AetherPortInfo
"""
super(AetherGraphEdge, self).__init__(**kwargs)
self.source_output_port = kwargs.get('source_output_port', None)
self.destination_input_port = kwargs.get('destination_input_port', None)
class AetherGraphEntity(msrest.serialization.Model):
"""AetherGraphEntity.
:ivar module_nodes:
:vartype module_nodes: list[~flow.models.AetherGraphModuleNode]
:ivar dataset_nodes:
:vartype dataset_nodes: list[~flow.models.AetherGraphDatasetNode]
:ivar sub_graph_nodes:
:vartype sub_graph_nodes: list[~flow.models.AetherGraphReferenceNode]
:ivar control_reference_nodes:
:vartype control_reference_nodes: list[~flow.models.AetherGraphControlReferenceNode]
:ivar control_nodes:
:vartype control_nodes: list[~flow.models.AetherGraphControlNode]
:ivar edges:
:vartype edges: list[~flow.models.AetherGraphEdge]
:ivar default_compute:
:vartype default_compute: ~flow.models.AetherComputeSetting
:ivar default_datastore:
:vartype default_datastore: ~flow.models.AetherDatastoreSetting
:ivar default_cloud_priority:
:vartype default_cloud_priority: ~flow.models.AetherCloudPrioritySetting
:ivar parent_sub_graph_module_ids:
:vartype parent_sub_graph_module_ids: list[str]
:ivar id:
:vartype id: str
:ivar workspace_id:
:vartype workspace_id: str
:ivar etag:
:vartype etag: str
:ivar tags: A set of tags.
:vartype tags: list[str]
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.AetherEntityStatus
"""
_attribute_map = {
'module_nodes': {'key': 'moduleNodes', 'type': '[AetherGraphModuleNode]'},
'dataset_nodes': {'key': 'datasetNodes', 'type': '[AetherGraphDatasetNode]'},
'sub_graph_nodes': {'key': 'subGraphNodes', 'type': '[AetherGraphReferenceNode]'},
'control_reference_nodes': {'key': 'controlReferenceNodes', 'type': '[AetherGraphControlReferenceNode]'},
'control_nodes': {'key': 'controlNodes', 'type': '[AetherGraphControlNode]'},
'edges': {'key': 'edges', 'type': '[AetherGraphEdge]'},
'default_compute': {'key': 'defaultCompute', 'type': 'AetherComputeSetting'},
'default_datastore': {'key': 'defaultDatastore', 'type': 'AetherDatastoreSetting'},
'default_cloud_priority': {'key': 'defaultCloudPriority', 'type': 'AetherCloudPrioritySetting'},
'parent_sub_graph_module_ids': {'key': 'parentSubGraphModuleIds', 'type': '[str]'},
'id': {'key': 'id', 'type': 'str'},
'workspace_id': {'key': 'workspaceId', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'tags': {'key': 'tags', 'type': '[str]'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword module_nodes:
:paramtype module_nodes: list[~flow.models.AetherGraphModuleNode]
:keyword dataset_nodes:
:paramtype dataset_nodes: list[~flow.models.AetherGraphDatasetNode]
:keyword sub_graph_nodes:
:paramtype sub_graph_nodes: list[~flow.models.AetherGraphReferenceNode]
:keyword control_reference_nodes:
:paramtype control_reference_nodes: list[~flow.models.AetherGraphControlReferenceNode]
:keyword control_nodes:
:paramtype control_nodes: list[~flow.models.AetherGraphControlNode]
:keyword edges:
:paramtype edges: list[~flow.models.AetherGraphEdge]
:keyword default_compute:
:paramtype default_compute: ~flow.models.AetherComputeSetting
:keyword default_datastore:
:paramtype default_datastore: ~flow.models.AetherDatastoreSetting
:keyword default_cloud_priority:
:paramtype default_cloud_priority: ~flow.models.AetherCloudPrioritySetting
:keyword parent_sub_graph_module_ids:
:paramtype parent_sub_graph_module_ids: list[str]
:keyword id:
:paramtype id: str
:keyword workspace_id:
:paramtype workspace_id: str
:keyword etag:
:paramtype etag: str
:keyword tags: A set of tags.
:paramtype tags: list[str]
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.AetherEntityStatus
"""
super(AetherGraphEntity, self).__init__(**kwargs)
self.module_nodes = kwargs.get('module_nodes', None)
self.dataset_nodes = kwargs.get('dataset_nodes', None)
self.sub_graph_nodes = kwargs.get('sub_graph_nodes', None)
self.control_reference_nodes = kwargs.get('control_reference_nodes', None)
self.control_nodes = kwargs.get('control_nodes', None)
self.edges = kwargs.get('edges', None)
self.default_compute = kwargs.get('default_compute', None)
self.default_datastore = kwargs.get('default_datastore', None)
self.default_cloud_priority = kwargs.get('default_cloud_priority', None)
self.parent_sub_graph_module_ids = kwargs.get('parent_sub_graph_module_ids', None)
self.id = kwargs.get('id', None)
self.workspace_id = kwargs.get('workspace_id', None)
self.etag = kwargs.get('etag', None)
self.tags = kwargs.get('tags', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
self.entity_status = kwargs.get('entity_status', None)
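

# Illustrative sketch, not part of the generated contract: assembling a minimal
# graph from two module nodes and one edge. All ids and names are hypothetical;
# AetherGraphModuleNode and AetherPortInfo are defined later in this module.
def _example_build_graph_entity():
    node_a = AetherGraphModuleNode(id='node-a', module_id='module-1', name='step-a')
    node_b = AetherGraphModuleNode(id='node-b', module_id='module-2', name='step-b')
    edge = AetherGraphEdge(
        source_output_port=AetherPortInfo(node_id='node-a', port_name='output_data'),
        destination_input_port=AetherPortInfo(node_id='node-b', port_name='input_data'),
    )
    return AetherGraphEntity(module_nodes=[node_a, node_b], edges=[edge], tags=['example'])
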
class AetherGraphModuleNode(msrest.serialization.Model):
"""AetherGraphModuleNode.
:ivar cloud_priority:
:vartype cloud_priority: int
:ivar default_data_retention_hint:
:vartype default_data_retention_hint: int
:ivar compliance_cluster:
:vartype compliance_cluster: str
:ivar euclid_workspace_id:
:vartype euclid_workspace_id: str
:ivar attached_modules:
:vartype attached_modules: list[str]
:ivar acceptable_machine_clusters:
:vartype acceptable_machine_clusters: list[str]
:ivar custom_data_location_id:
:vartype custom_data_location_id: str
:ivar alert_timeout_duration:
:vartype alert_timeout_duration: str
:ivar runconfig:
:vartype runconfig: str
:ivar id:
:vartype id: str
:ivar module_id:
:vartype module_id: str
:ivar comment:
:vartype comment: str
:ivar name:
:vartype name: str
:ivar module_parameters:
:vartype module_parameters: list[~flow.models.AetherParameterAssignment]
:ivar module_metadata_parameters:
:vartype module_metadata_parameters: list[~flow.models.AetherParameterAssignment]
:ivar module_output_settings:
:vartype module_output_settings: list[~flow.models.AetherOutputSetting]
:ivar module_input_settings:
:vartype module_input_settings: list[~flow.models.AetherInputSetting]
:ivar use_graph_default_compute:
:vartype use_graph_default_compute: bool
:ivar use_graph_default_datastore:
:vartype use_graph_default_datastore: bool
:ivar regenerate_output:
:vartype regenerate_output: bool
:ivar control_inputs:
:vartype control_inputs: list[~flow.models.AetherControlInput]
:ivar cloud_settings:
:vartype cloud_settings: ~flow.models.AetherCloudSettings
:ivar execution_phase: Possible values include: "Execution", "Initialization", "Finalization".
:vartype execution_phase: str or ~flow.models.AetherExecutionPhase
:ivar run_attribution:
:vartype run_attribution: str
"""
_attribute_map = {
'cloud_priority': {'key': 'cloudPriority', 'type': 'int'},
'default_data_retention_hint': {'key': 'defaultDataRetentionHint', 'type': 'int'},
'compliance_cluster': {'key': 'complianceCluster', 'type': 'str'},
'euclid_workspace_id': {'key': 'euclidWorkspaceId', 'type': 'str'},
'attached_modules': {'key': 'attachedModules', 'type': '[str]'},
'acceptable_machine_clusters': {'key': 'acceptableMachineClusters', 'type': '[str]'},
'custom_data_location_id': {'key': 'customDataLocationId', 'type': 'str'},
'alert_timeout_duration': {'key': 'alertTimeoutDuration', 'type': 'str'},
'runconfig': {'key': 'runconfig', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'module_id': {'key': 'moduleId', 'type': 'str'},
'comment': {'key': 'comment', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'module_parameters': {'key': 'moduleParameters', 'type': '[AetherParameterAssignment]'},
'module_metadata_parameters': {'key': 'moduleMetadataParameters', 'type': '[AetherParameterAssignment]'},
'module_output_settings': {'key': 'moduleOutputSettings', 'type': '[AetherOutputSetting]'},
'module_input_settings': {'key': 'moduleInputSettings', 'type': '[AetherInputSetting]'},
'use_graph_default_compute': {'key': 'useGraphDefaultCompute', 'type': 'bool'},
'use_graph_default_datastore': {'key': 'useGraphDefaultDatastore', 'type': 'bool'},
'regenerate_output': {'key': 'regenerateOutput', 'type': 'bool'},
'control_inputs': {'key': 'controlInputs', 'type': '[AetherControlInput]'},
'cloud_settings': {'key': 'cloudSettings', 'type': 'AetherCloudSettings'},
'execution_phase': {'key': 'executionPhase', 'type': 'str'},
'run_attribution': {'key': 'runAttribution', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword cloud_priority:
:paramtype cloud_priority: int
:keyword default_data_retention_hint:
:paramtype default_data_retention_hint: int
:keyword compliance_cluster:
:paramtype compliance_cluster: str
:keyword euclid_workspace_id:
:paramtype euclid_workspace_id: str
:keyword attached_modules:
:paramtype attached_modules: list[str]
:keyword acceptable_machine_clusters:
:paramtype acceptable_machine_clusters: list[str]
:keyword custom_data_location_id:
:paramtype custom_data_location_id: str
:keyword alert_timeout_duration:
:paramtype alert_timeout_duration: str
:keyword runconfig:
:paramtype runconfig: str
:keyword id:
:paramtype id: str
:keyword module_id:
:paramtype module_id: str
:keyword comment:
:paramtype comment: str
:keyword name:
:paramtype name: str
:keyword module_parameters:
:paramtype module_parameters: list[~flow.models.AetherParameterAssignment]
:keyword module_metadata_parameters:
:paramtype module_metadata_parameters: list[~flow.models.AetherParameterAssignment]
:keyword module_output_settings:
:paramtype module_output_settings: list[~flow.models.AetherOutputSetting]
:keyword module_input_settings:
:paramtype module_input_settings: list[~flow.models.AetherInputSetting]
:keyword use_graph_default_compute:
:paramtype use_graph_default_compute: bool
:keyword use_graph_default_datastore:
:paramtype use_graph_default_datastore: bool
:keyword regenerate_output:
:paramtype regenerate_output: bool
:keyword control_inputs:
:paramtype control_inputs: list[~flow.models.AetherControlInput]
:keyword cloud_settings:
:paramtype cloud_settings: ~flow.models.AetherCloudSettings
:keyword execution_phase: Possible values include: "Execution", "Initialization",
"Finalization".
:paramtype execution_phase: str or ~flow.models.AetherExecutionPhase
:keyword run_attribution:
:paramtype run_attribution: str
"""
super(AetherGraphModuleNode, self).__init__(**kwargs)
self.cloud_priority = kwargs.get('cloud_priority', None)
self.default_data_retention_hint = kwargs.get('default_data_retention_hint', None)
self.compliance_cluster = kwargs.get('compliance_cluster', None)
self.euclid_workspace_id = kwargs.get('euclid_workspace_id', None)
self.attached_modules = kwargs.get('attached_modules', None)
self.acceptable_machine_clusters = kwargs.get('acceptable_machine_clusters', None)
self.custom_data_location_id = kwargs.get('custom_data_location_id', None)
self.alert_timeout_duration = kwargs.get('alert_timeout_duration', None)
self.runconfig = kwargs.get('runconfig', None)
self.id = kwargs.get('id', None)
self.module_id = kwargs.get('module_id', None)
self.comment = kwargs.get('comment', None)
self.name = kwargs.get('name', None)
self.module_parameters = kwargs.get('module_parameters', None)
self.module_metadata_parameters = kwargs.get('module_metadata_parameters', None)
self.module_output_settings = kwargs.get('module_output_settings', None)
self.module_input_settings = kwargs.get('module_input_settings', None)
self.use_graph_default_compute = kwargs.get('use_graph_default_compute', None)
self.use_graph_default_datastore = kwargs.get('use_graph_default_datastore', None)
self.regenerate_output = kwargs.get('regenerate_output', None)
self.control_inputs = kwargs.get('control_inputs', None)
self.cloud_settings = kwargs.get('cloud_settings', None)
self.execution_phase = kwargs.get('execution_phase', None)
self.run_attribution = kwargs.get('run_attribution', None)
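

# Illustrative sketch, not part of the generated contract: a module node that
# pins one literal parameter and opts out of the graph-default compute. The
# parameter name and values are hypothetical; 'Literal' is one of the value
# types documented on AetherParameterAssignment, defined later in this module.
def _example_build_module_node():
    return AetherGraphModuleNode(
        id='node-a',
        module_id='module-1',
        name='train-step',
        module_parameters=[
            AetherParameterAssignment(name='learning_rate', value='0.01', value_type='Literal'),
        ],
        use_graph_default_compute=False,
        use_graph_default_datastore=True,
        regenerate_output=True,
    )
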
class AetherGraphReferenceNode(msrest.serialization.Model):
"""AetherGraphReferenceNode.
:ivar graph_id:
:vartype graph_id: str
:ivar default_compute:
:vartype default_compute: ~flow.models.AetherComputeSetting
:ivar default_datastore:
:vartype default_datastore: ~flow.models.AetherDatastoreSetting
:ivar id:
:vartype id: str
:ivar module_id:
:vartype module_id: str
:ivar comment:
:vartype comment: str
:ivar name:
:vartype name: str
:ivar module_parameters:
:vartype module_parameters: list[~flow.models.AetherParameterAssignment]
:ivar module_metadata_parameters:
:vartype module_metadata_parameters: list[~flow.models.AetherParameterAssignment]
:ivar module_output_settings:
:vartype module_output_settings: list[~flow.models.AetherOutputSetting]
:ivar module_input_settings:
:vartype module_input_settings: list[~flow.models.AetherInputSetting]
:ivar use_graph_default_compute:
:vartype use_graph_default_compute: bool
:ivar use_graph_default_datastore:
:vartype use_graph_default_datastore: bool
:ivar regenerate_output:
:vartype regenerate_output: bool
:ivar control_inputs:
:vartype control_inputs: list[~flow.models.AetherControlInput]
:ivar cloud_settings:
:vartype cloud_settings: ~flow.models.AetherCloudSettings
:ivar execution_phase: Possible values include: "Execution", "Initialization", "Finalization".
:vartype execution_phase: str or ~flow.models.AetherExecutionPhase
:ivar run_attribution:
:vartype run_attribution: str
"""
_attribute_map = {
'graph_id': {'key': 'graphId', 'type': 'str'},
'default_compute': {'key': 'defaultCompute', 'type': 'AetherComputeSetting'},
'default_datastore': {'key': 'defaultDatastore', 'type': 'AetherDatastoreSetting'},
'id': {'key': 'id', 'type': 'str'},
'module_id': {'key': 'moduleId', 'type': 'str'},
'comment': {'key': 'comment', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'module_parameters': {'key': 'moduleParameters', 'type': '[AetherParameterAssignment]'},
'module_metadata_parameters': {'key': 'moduleMetadataParameters', 'type': '[AetherParameterAssignment]'},
'module_output_settings': {'key': 'moduleOutputSettings', 'type': '[AetherOutputSetting]'},
'module_input_settings': {'key': 'moduleInputSettings', 'type': '[AetherInputSetting]'},
'use_graph_default_compute': {'key': 'useGraphDefaultCompute', 'type': 'bool'},
'use_graph_default_datastore': {'key': 'useGraphDefaultDatastore', 'type': 'bool'},
'regenerate_output': {'key': 'regenerateOutput', 'type': 'bool'},
'control_inputs': {'key': 'controlInputs', 'type': '[AetherControlInput]'},
'cloud_settings': {'key': 'cloudSettings', 'type': 'AetherCloudSettings'},
'execution_phase': {'key': 'executionPhase', 'type': 'str'},
'run_attribution': {'key': 'runAttribution', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword graph_id:
:paramtype graph_id: str
:keyword default_compute:
:paramtype default_compute: ~flow.models.AetherComputeSetting
:keyword default_datastore:
:paramtype default_datastore: ~flow.models.AetherDatastoreSetting
:keyword id:
:paramtype id: str
:keyword module_id:
:paramtype module_id: str
:keyword comment:
:paramtype comment: str
:keyword name:
:paramtype name: str
:keyword module_parameters:
:paramtype module_parameters: list[~flow.models.AetherParameterAssignment]
:keyword module_metadata_parameters:
:paramtype module_metadata_parameters: list[~flow.models.AetherParameterAssignment]
:keyword module_output_settings:
:paramtype module_output_settings: list[~flow.models.AetherOutputSetting]
:keyword module_input_settings:
:paramtype module_input_settings: list[~flow.models.AetherInputSetting]
:keyword use_graph_default_compute:
:paramtype use_graph_default_compute: bool
:keyword use_graph_default_datastore:
:paramtype use_graph_default_datastore: bool
:keyword regenerate_output:
:paramtype regenerate_output: bool
:keyword control_inputs:
:paramtype control_inputs: list[~flow.models.AetherControlInput]
:keyword cloud_settings:
:paramtype cloud_settings: ~flow.models.AetherCloudSettings
:keyword execution_phase: Possible values include: "Execution", "Initialization",
"Finalization".
:paramtype execution_phase: str or ~flow.models.AetherExecutionPhase
:keyword run_attribution:
:paramtype run_attribution: str
"""
super(AetherGraphReferenceNode, self).__init__(**kwargs)
self.graph_id = kwargs.get('graph_id', None)
self.default_compute = kwargs.get('default_compute', None)
self.default_datastore = kwargs.get('default_datastore', None)
self.id = kwargs.get('id', None)
self.module_id = kwargs.get('module_id', None)
self.comment = kwargs.get('comment', None)
self.name = kwargs.get('name', None)
self.module_parameters = kwargs.get('module_parameters', None)
self.module_metadata_parameters = kwargs.get('module_metadata_parameters', None)
self.module_output_settings = kwargs.get('module_output_settings', None)
self.module_input_settings = kwargs.get('module_input_settings', None)
self.use_graph_default_compute = kwargs.get('use_graph_default_compute', None)
self.use_graph_default_datastore = kwargs.get('use_graph_default_datastore', None)
self.regenerate_output = kwargs.get('regenerate_output', None)
self.control_inputs = kwargs.get('control_inputs', None)
self.cloud_settings = kwargs.get('cloud_settings', None)
self.execution_phase = kwargs.get('execution_phase', None)
        self.run_attribution = kwargs.get('run_attribution', None)


class AetherHdfsReference(msrest.serialization.Model):
"""AetherHdfsReference.
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
:ivar relative_path:
:vartype relative_path: str
"""
_attribute_map = {
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
:keyword relative_path:
:paramtype relative_path: str
"""
super(AetherHdfsReference, self).__init__(**kwargs)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
self.relative_path = kwargs.get('relative_path', None)
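

# Illustrative sketch, not part of the generated contract: referencing a file
# through a registered AML datastore. The datastore name and relative path are
# hypothetical placeholders.
def _example_build_hdfs_reference():
    return AetherHdfsReference(
        aml_data_store_name='workspaceblobstore',
        relative_path='datasets/iris/train.csv',
    )
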
class AetherHdiClusterComputeInfo(msrest.serialization.Model):
"""AetherHdiClusterComputeInfo.
:ivar address:
:vartype address: str
:ivar username:
:vartype username: str
:ivar password:
:vartype password: str
:ivar private_key:
:vartype private_key: str
"""
_attribute_map = {
'address': {'key': 'address', 'type': 'str'},
'username': {'key': 'username', 'type': 'str'},
'password': {'key': 'password', 'type': 'str'},
'private_key': {'key': 'privateKey', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword address:
:paramtype address: str
:keyword username:
:paramtype username: str
:keyword password:
:paramtype password: str
:keyword private_key:
:paramtype private_key: str
"""
super(AetherHdiClusterComputeInfo, self).__init__(**kwargs)
self.address = kwargs.get('address', None)
self.username = kwargs.get('username', None)
self.password = kwargs.get('password', None)
        self.private_key = kwargs.get('private_key', None)


class AetherHdiRunConfiguration(msrest.serialization.Model):
"""AetherHdiRunConfiguration.
:ivar file:
:vartype file: str
:ivar class_name:
:vartype class_name: str
:ivar files:
:vartype files: list[str]
:ivar archives:
:vartype archives: list[str]
:ivar jars:
:vartype jars: list[str]
:ivar py_files:
:vartype py_files: list[str]
:ivar compute_name:
:vartype compute_name: str
:ivar queue:
:vartype queue: str
:ivar driver_memory:
:vartype driver_memory: str
:ivar driver_cores:
:vartype driver_cores: int
:ivar executor_memory:
:vartype executor_memory: str
:ivar executor_cores:
:vartype executor_cores: int
:ivar number_executors:
:vartype number_executors: int
:ivar conf: Dictionary of :code:`<string>`.
:vartype conf: dict[str, str]
:ivar name:
:vartype name: str
"""
_attribute_map = {
'file': {'key': 'file', 'type': 'str'},
'class_name': {'key': 'className', 'type': 'str'},
'files': {'key': 'files', 'type': '[str]'},
'archives': {'key': 'archives', 'type': '[str]'},
'jars': {'key': 'jars', 'type': '[str]'},
'py_files': {'key': 'pyFiles', 'type': '[str]'},
'compute_name': {'key': 'computeName', 'type': 'str'},
'queue': {'key': 'queue', 'type': 'str'},
'driver_memory': {'key': 'driverMemory', 'type': 'str'},
'driver_cores': {'key': 'driverCores', 'type': 'int'},
'executor_memory': {'key': 'executorMemory', 'type': 'str'},
'executor_cores': {'key': 'executorCores', 'type': 'int'},
'number_executors': {'key': 'numberExecutors', 'type': 'int'},
'conf': {'key': 'conf', 'type': '{str}'},
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword file:
:paramtype file: str
:keyword class_name:
:paramtype class_name: str
:keyword files:
:paramtype files: list[str]
:keyword archives:
:paramtype archives: list[str]
:keyword jars:
:paramtype jars: list[str]
:keyword py_files:
:paramtype py_files: list[str]
:keyword compute_name:
:paramtype compute_name: str
:keyword queue:
:paramtype queue: str
:keyword driver_memory:
:paramtype driver_memory: str
:keyword driver_cores:
:paramtype driver_cores: int
:keyword executor_memory:
:paramtype executor_memory: str
:keyword executor_cores:
:paramtype executor_cores: int
:keyword number_executors:
:paramtype number_executors: int
:keyword conf: Dictionary of :code:`<string>`.
:paramtype conf: dict[str, str]
:keyword name:
:paramtype name: str
"""
super(AetherHdiRunConfiguration, self).__init__(**kwargs)
self.file = kwargs.get('file', None)
self.class_name = kwargs.get('class_name', None)
self.files = kwargs.get('files', None)
self.archives = kwargs.get('archives', None)
self.jars = kwargs.get('jars', None)
self.py_files = kwargs.get('py_files', None)
self.compute_name = kwargs.get('compute_name', None)
self.queue = kwargs.get('queue', None)
self.driver_memory = kwargs.get('driver_memory', None)
self.driver_cores = kwargs.get('driver_cores', None)
self.executor_memory = kwargs.get('executor_memory', None)
self.executor_cores = kwargs.get('executor_cores', None)
self.number_executors = kwargs.get('number_executors', None)
self.conf = kwargs.get('conf', None)
self.name = kwargs.get('name', None)
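

# Illustrative sketch, not part of the generated contract: a Spark-style HDI
# run. All file names, the queue, and the sizing values are hypothetical, and
# the '4g'/'8g' memory strings assume the usual Spark size notation.
def _example_build_hdi_run_configuration():
    return AetherHdiRunConfiguration(
        file='train.py',
        py_files=['utils.py'],
        compute_name='my-hdi-cluster',
        queue='default',
        driver_memory='4g',
        driver_cores=2,
        executor_memory='8g',
        executor_cores=4,
        number_executors=10,
        conf={'spark.yarn.maxAppAttempts': '1'},
        name='example-hdi-run',
    )
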
class AetherHyperDriveConfiguration(msrest.serialization.Model):
"""AetherHyperDriveConfiguration.
:ivar hyper_drive_run_config:
:vartype hyper_drive_run_config: str
:ivar primary_metric_goal:
:vartype primary_metric_goal: str
:ivar primary_metric_name:
:vartype primary_metric_name: str
:ivar arguments:
:vartype arguments: list[~flow.models.AetherArgumentAssignment]
"""
_attribute_map = {
'hyper_drive_run_config': {'key': 'hyperDriveRunConfig', 'type': 'str'},
'primary_metric_goal': {'key': 'primaryMetricGoal', 'type': 'str'},
'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'},
'arguments': {'key': 'arguments', 'type': '[AetherArgumentAssignment]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword hyper_drive_run_config:
:paramtype hyper_drive_run_config: str
:keyword primary_metric_goal:
:paramtype primary_metric_goal: str
:keyword primary_metric_name:
:paramtype primary_metric_name: str
:keyword arguments:
:paramtype arguments: list[~flow.models.AetherArgumentAssignment]
"""
super(AetherHyperDriveConfiguration, self).__init__(**kwargs)
self.hyper_drive_run_config = kwargs.get('hyper_drive_run_config', None)
self.primary_metric_goal = kwargs.get('primary_metric_goal', None)
self.primary_metric_name = kwargs.get('primary_metric_name', None)
self.arguments = kwargs.get('arguments', None)
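

# Illustrative sketch, not part of the generated contract: a sweep that tracks
# one primary metric. The contract types the goal as a plain string, so the
# 'maximize' value here is an assumption, as is the metric name.
def _example_build_hyperdrive_configuration():
    return AetherHyperDriveConfiguration(
        primary_metric_name='accuracy',
        primary_metric_goal='maximize',
    )
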
class AetherIdentitySetting(msrest.serialization.Model):
"""AetherIdentitySetting.
:ivar type: Possible values include: "UserIdentity", "Managed", "AMLToken".
:vartype type: str or ~flow.models.AetherIdentityType
:ivar client_id:
:vartype client_id: str
:ivar object_id:
:vartype object_id: str
:ivar msi_resource_id:
:vartype msi_resource_id: str
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'client_id': {'key': 'clientId', 'type': 'str'},
'object_id': {'key': 'objectId', 'type': 'str'},
'msi_resource_id': {'key': 'msiResourceId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: Possible values include: "UserIdentity", "Managed", "AMLToken".
:paramtype type: str or ~flow.models.AetherIdentityType
:keyword client_id:
:paramtype client_id: str
:keyword object_id:
:paramtype object_id: str
:keyword msi_resource_id:
:paramtype msi_resource_id: str
"""
super(AetherIdentitySetting, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.client_id = kwargs.get('client_id', None)
self.object_id = kwargs.get('object_id', None)
self.msi_resource_id = kwargs.get('msi_resource_id', None)
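

# Illustrative sketch, not part of the generated contract: running under a
# user-assigned managed identity. 'Managed' is one of the documented type
# values; the ids below are placeholder values, not real resources.
def _example_build_identity_setting():
    return AetherIdentitySetting(
        type='Managed',
        client_id='00000000-0000-0000-0000-000000000000',
        msi_resource_id='/subscriptions/<sub>/resourceGroups/<rg>/providers/'
                        'Microsoft.ManagedIdentity/userAssignedIdentities/<name>',
    )
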
class AetherImportDataTask(msrest.serialization.Model):
"""AetherImportDataTask.
:ivar data_transfer_source:
:vartype data_transfer_source: ~flow.models.AetherDataTransferSource
"""
_attribute_map = {
'data_transfer_source': {'key': 'DataTransferSource', 'type': 'AetherDataTransferSource'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_transfer_source:
:paramtype data_transfer_source: ~flow.models.AetherDataTransferSource
"""
super(AetherImportDataTask, self).__init__(**kwargs)
        self.data_transfer_source = kwargs.get('data_transfer_source', None)


class AetherInputSetting(msrest.serialization.Model):
"""AetherInputSetting.
:ivar name:
:vartype name: str
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AetherDataStoreMode
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar options: This is a dictionary.
:vartype options: dict[str, str]
:ivar additional_transformations:
:vartype additional_transformations: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'options': {'key': 'options', 'type': '{str}'},
'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AetherDataStoreMode
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword options: This is a dictionary.
:paramtype options: dict[str, str]
:keyword additional_transformations:
:paramtype additional_transformations: str
"""
super(AetherInputSetting, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.data_store_mode = kwargs.get('data_store_mode', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
self.options = kwargs.get('options', None)
self.additional_transformations = kwargs.get('additional_transformations', None)
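

# Illustrative sketch, not part of the generated contract: mounting an input
# instead of downloading it. 'Mount' is one of the documented data-store modes;
# the input name and compute path are hypothetical.
def _example_build_input_setting():
    return AetherInputSetting(
        name='training_data',
        data_store_mode='Mount',
        path_on_compute='/mnt/inputs/training_data',
    )
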
class AetherInteractiveConfig(msrest.serialization.Model):
"""AetherInteractiveConfig.
:ivar is_ssh_enabled:
:vartype is_ssh_enabled: bool
:ivar ssh_public_key:
:vartype ssh_public_key: str
:ivar is_i_python_enabled:
:vartype is_i_python_enabled: bool
:ivar is_tensor_board_enabled:
:vartype is_tensor_board_enabled: bool
:ivar interactive_port:
:vartype interactive_port: int
"""
_attribute_map = {
'is_ssh_enabled': {'key': 'isSSHEnabled', 'type': 'bool'},
'ssh_public_key': {'key': 'sshPublicKey', 'type': 'str'},
'is_i_python_enabled': {'key': 'isIPythonEnabled', 'type': 'bool'},
'is_tensor_board_enabled': {'key': 'isTensorBoardEnabled', 'type': 'bool'},
'interactive_port': {'key': 'interactivePort', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword is_ssh_enabled:
:paramtype is_ssh_enabled: bool
:keyword ssh_public_key:
:paramtype ssh_public_key: str
:keyword is_i_python_enabled:
:paramtype is_i_python_enabled: bool
:keyword is_tensor_board_enabled:
:paramtype is_tensor_board_enabled: bool
:keyword interactive_port:
:paramtype interactive_port: int
"""
super(AetherInteractiveConfig, self).__init__(**kwargs)
self.is_ssh_enabled = kwargs.get('is_ssh_enabled', None)
self.ssh_public_key = kwargs.get('ssh_public_key', None)
self.is_i_python_enabled = kwargs.get('is_i_python_enabled', None)
self.is_tensor_board_enabled = kwargs.get('is_tensor_board_enabled', None)
        self.interactive_port = kwargs.get('interactive_port', None)


class AetherK8SConfiguration(msrest.serialization.Model):
"""AetherK8SConfiguration.
:ivar max_retry_count:
:vartype max_retry_count: int
:ivar resource_configuration:
:vartype resource_configuration: ~flow.models.AetherResourceConfig
:ivar priority_configuration:
:vartype priority_configuration: ~flow.models.AetherPriorityConfig
:ivar interactive_configuration:
:vartype interactive_configuration: ~flow.models.AetherInteractiveConfig
"""
_attribute_map = {
'max_retry_count': {'key': 'maxRetryCount', 'type': 'int'},
'resource_configuration': {'key': 'resourceConfiguration', 'type': 'AetherResourceConfig'},
'priority_configuration': {'key': 'priorityConfiguration', 'type': 'AetherPriorityConfig'},
'interactive_configuration': {'key': 'interactiveConfiguration', 'type': 'AetherInteractiveConfig'},
}
def __init__(
self,
**kwargs
):
"""
:keyword max_retry_count:
:paramtype max_retry_count: int
:keyword resource_configuration:
:paramtype resource_configuration: ~flow.models.AetherResourceConfig
:keyword priority_configuration:
:paramtype priority_configuration: ~flow.models.AetherPriorityConfig
:keyword interactive_configuration:
:paramtype interactive_configuration: ~flow.models.AetherInteractiveConfig
"""
super(AetherK8SConfiguration, self).__init__(**kwargs)
self.max_retry_count = kwargs.get('max_retry_count', None)
self.resource_configuration = kwargs.get('resource_configuration', None)
self.priority_configuration = kwargs.get('priority_configuration', None)
self.interactive_configuration = kwargs.get('interactive_configuration', None)
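

# Illustrative sketch, not part of the generated contract: a K8s run with
# retries, a preemptible priority, and SSH disabled. The numbers are
# hypothetical; AetherPriorityConfig is defined later in this module.
def _example_build_k8s_configuration():
    return AetherK8SConfiguration(
        max_retry_count=3,
        priority_configuration=AetherPriorityConfig(job_priority=100, is_preemptible=True),
        interactive_configuration=AetherInteractiveConfig(is_ssh_enabled=False),
    )
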
class AetherLegacyDataPath(msrest.serialization.Model):
"""AetherLegacyDataPath.
:ivar data_store_name:
:vartype data_store_name: str
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AetherDataStoreMode
:ivar relative_path:
:vartype relative_path: str
"""
_attribute_map = {
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_store_name:
:paramtype data_store_name: str
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AetherDataStoreMode
:keyword relative_path:
:paramtype relative_path: str
"""
super(AetherLegacyDataPath, self).__init__(**kwargs)
self.data_store_name = kwargs.get('data_store_name', None)
self.data_store_mode = kwargs.get('data_store_mode', None)
        self.relative_path = kwargs.get('relative_path', None)


class AetherLimitSettings(msrest.serialization.Model):
"""AetherLimitSettings.
:ivar max_trials:
:vartype max_trials: int
:ivar timeout:
:vartype timeout: str
:ivar trial_timeout:
:vartype trial_timeout: str
:ivar max_concurrent_trials:
:vartype max_concurrent_trials: int
:ivar max_cores_per_trial:
:vartype max_cores_per_trial: int
:ivar exit_score:
:vartype exit_score: float
:ivar enable_early_termination:
:vartype enable_early_termination: bool
:ivar max_nodes:
:vartype max_nodes: int
"""
_attribute_map = {
'max_trials': {'key': 'maxTrials', 'type': 'int'},
'timeout': {'key': 'timeout', 'type': 'str'},
'trial_timeout': {'key': 'trialTimeout', 'type': 'str'},
'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'},
'max_cores_per_trial': {'key': 'maxCoresPerTrial', 'type': 'int'},
'exit_score': {'key': 'exitScore', 'type': 'float'},
'enable_early_termination': {'key': 'enableEarlyTermination', 'type': 'bool'},
'max_nodes': {'key': 'maxNodes', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword max_trials:
:paramtype max_trials: int
:keyword timeout:
:paramtype timeout: str
:keyword trial_timeout:
:paramtype trial_timeout: str
:keyword max_concurrent_trials:
:paramtype max_concurrent_trials: int
:keyword max_cores_per_trial:
:paramtype max_cores_per_trial: int
:keyword exit_score:
:paramtype exit_score: float
:keyword enable_early_termination:
:paramtype enable_early_termination: bool
:keyword max_nodes:
:paramtype max_nodes: int
"""
super(AetherLimitSettings, self).__init__(**kwargs)
self.max_trials = kwargs.get('max_trials', None)
self.timeout = kwargs.get('timeout', None)
self.trial_timeout = kwargs.get('trial_timeout', None)
self.max_concurrent_trials = kwargs.get('max_concurrent_trials', None)
self.max_cores_per_trial = kwargs.get('max_cores_per_trial', None)
self.exit_score = kwargs.get('exit_score', None)
self.enable_early_termination = kwargs.get('enable_early_termination', None)
self.max_nodes = kwargs.get('max_nodes', None)
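

# Illustrative sketch, not part of the generated contract: AutoML-style trial
# limits. The timeout fields are typed as plain ``str`` by the contract; the
# ISO 8601 durations ('PT2H', 'PT20M') are an assumption.
def _example_build_limit_settings():
    return AetherLimitSettings(
        max_trials=50,
        max_concurrent_trials=5,
        timeout='PT2H',
        trial_timeout='PT20M',
        exit_score=0.95,
        enable_early_termination=True,
    )
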
class AetherMlcComputeInfo(msrest.serialization.Model):
"""AetherMlcComputeInfo.
:ivar mlc_compute_type:
:vartype mlc_compute_type: str
"""
_attribute_map = {
'mlc_compute_type': {'key': 'mlcComputeType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword mlc_compute_type:
:paramtype mlc_compute_type: str
"""
super(AetherMlcComputeInfo, self).__init__(**kwargs)
        self.mlc_compute_type = kwargs.get('mlc_compute_type', None)


class AetherModuleEntity(msrest.serialization.Model):
"""AetherModuleEntity.
:ivar last_updated_by:
:vartype last_updated_by: ~flow.models.AetherCreatedBy
:ivar display_name:
:vartype display_name: str
:ivar module_execution_type:
:vartype module_execution_type: str
:ivar module_type: Possible values include: "None", "BatchInferencing".
:vartype module_type: str or ~flow.models.AetherModuleType
:ivar module_type_version:
:vartype module_type_version: str
:ivar resource_requirements:
:vartype resource_requirements: ~flow.models.AetherResourceModel
:ivar machine_cluster:
:vartype machine_cluster: list[str]
:ivar default_compliance_cluster:
:vartype default_compliance_cluster: str
:ivar repository_type: Possible values include: "None", "Other", "Git", "SourceDepot",
"Cosmos".
:vartype repository_type: str or ~flow.models.AetherRepositoryType
:ivar relative_path_to_source_code:
:vartype relative_path_to_source_code: str
:ivar commit_id:
:vartype commit_id: str
:ivar code_review_link:
:vartype code_review_link: str
:ivar unit_tests_available:
:vartype unit_tests_available: bool
:ivar is_compressed:
:vartype is_compressed: bool
:ivar execution_environment: Possible values include: "ExeWorkerMachine",
"DockerContainerWithoutNetwork", "DockerContainerWithNetwork", "HyperVWithoutNetwork",
"HyperVWithNetwork".
:vartype execution_environment: str or ~flow.models.AetherExecutionEnvironment
:ivar is_output_markup_enabled:
:vartype is_output_markup_enabled: bool
:ivar docker_image_id:
:vartype docker_image_id: str
:ivar docker_image_reference:
:vartype docker_image_reference: str
:ivar docker_image_security_groups:
:vartype docker_image_security_groups: str
:ivar extended_properties:
:vartype extended_properties: ~flow.models.AetherModuleExtendedProperties
:ivar deployment_source: Possible values include: "Client", "AutoDeployment", "Vsts".
:vartype deployment_source: str or ~flow.models.AetherModuleDeploymentSource
:ivar deployment_source_metadata:
:vartype deployment_source_metadata: str
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar identifier_hash_v2:
:vartype identifier_hash_v2: str
:ivar kv_tags: This is a dictionary.
:vartype kv_tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar created_by:
:vartype created_by: ~flow.models.AetherCreatedBy
:ivar runconfig:
:vartype runconfig: str
:ivar cloud_settings:
:vartype cloud_settings: ~flow.models.AetherCloudSettings
:ivar category:
:vartype category: str
:ivar step_type:
:vartype step_type: str
:ivar stage:
:vartype stage: str
:ivar upload_state: Possible values include: "Uploading", "Completed", "Canceled", "Failed".
:vartype upload_state: str or ~flow.models.AetherUploadState
:ivar source_code_location:
:vartype source_code_location: str
:ivar size_in_bytes:
:vartype size_in_bytes: long
:ivar download_location:
:vartype download_location: str
:ivar data_location:
:vartype data_location: ~flow.models.AetherDataLocation
:ivar scripting_runtime_id:
:vartype scripting_runtime_id: str
:ivar interface_documentation:
:vartype interface_documentation: ~flow.models.AetherEntityInterfaceDocumentation
:ivar is_eyes_on:
:vartype is_eyes_on: bool
:ivar compliance_cluster:
:vartype compliance_cluster: str
:ivar is_deterministic:
:vartype is_deterministic: bool
:ivar information_url:
:vartype information_url: str
:ivar is_experiment_id_in_parameters:
:vartype is_experiment_id_in_parameters: bool
:ivar interface_string:
:vartype interface_string: str
:ivar default_parameters: This is a dictionary.
:vartype default_parameters: dict[str, str]
:ivar structured_interface:
:vartype structured_interface: ~flow.models.AetherStructuredInterface
:ivar family_id:
:vartype family_id: str
:ivar name:
:vartype name: str
:ivar hash:
:vartype hash: str
:ivar description:
:vartype description: str
:ivar version:
:vartype version: str
:ivar sequence_number_in_family:
:vartype sequence_number_in_family: int
:ivar owner:
:vartype owner: str
:ivar azure_tenant_id:
:vartype azure_tenant_id: str
:ivar azure_user_id:
:vartype azure_user_id: str
:ivar collaborators:
:vartype collaborators: list[str]
:ivar id:
:vartype id: str
:ivar workspace_id:
:vartype workspace_id: str
:ivar etag:
:vartype etag: str
:ivar tags: A set of tags.
:vartype tags: list[str]
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.AetherEntityStatus
"""
_attribute_map = {
'last_updated_by': {'key': 'lastUpdatedBy', 'type': 'AetherCreatedBy'},
'display_name': {'key': 'displayName', 'type': 'str'},
'module_execution_type': {'key': 'moduleExecutionType', 'type': 'str'},
'module_type': {'key': 'moduleType', 'type': 'str'},
'module_type_version': {'key': 'moduleTypeVersion', 'type': 'str'},
'resource_requirements': {'key': 'resourceRequirements', 'type': 'AetherResourceModel'},
'machine_cluster': {'key': 'machineCluster', 'type': '[str]'},
'default_compliance_cluster': {'key': 'defaultComplianceCluster', 'type': 'str'},
'repository_type': {'key': 'repositoryType', 'type': 'str'},
'relative_path_to_source_code': {'key': 'relativePathToSourceCode', 'type': 'str'},
'commit_id': {'key': 'commitId', 'type': 'str'},
'code_review_link': {'key': 'codeReviewLink', 'type': 'str'},
'unit_tests_available': {'key': 'unitTestsAvailable', 'type': 'bool'},
'is_compressed': {'key': 'isCompressed', 'type': 'bool'},
'execution_environment': {'key': 'executionEnvironment', 'type': 'str'},
'is_output_markup_enabled': {'key': 'isOutputMarkupEnabled', 'type': 'bool'},
'docker_image_id': {'key': 'dockerImageId', 'type': 'str'},
'docker_image_reference': {'key': 'dockerImageReference', 'type': 'str'},
'docker_image_security_groups': {'key': 'dockerImageSecurityGroups', 'type': 'str'},
'extended_properties': {'key': 'extendedProperties', 'type': 'AetherModuleExtendedProperties'},
'deployment_source': {'key': 'deploymentSource', 'type': 'str'},
'deployment_source_metadata': {'key': 'deploymentSourceMetadata', 'type': 'str'},
'identifier_hash': {'key': 'identifierHash', 'type': 'str'},
'identifier_hash_v2': {'key': 'identifierHashV2', 'type': 'str'},
'kv_tags': {'key': 'kvTags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'created_by': {'key': 'createdBy', 'type': 'AetherCreatedBy'},
'runconfig': {'key': 'runconfig', 'type': 'str'},
'cloud_settings': {'key': 'cloudSettings', 'type': 'AetherCloudSettings'},
'category': {'key': 'category', 'type': 'str'},
'step_type': {'key': 'stepType', 'type': 'str'},
'stage': {'key': 'stage', 'type': 'str'},
'upload_state': {'key': 'uploadState', 'type': 'str'},
'source_code_location': {'key': 'sourceCodeLocation', 'type': 'str'},
'size_in_bytes': {'key': 'sizeInBytes', 'type': 'long'},
'download_location': {'key': 'downloadLocation', 'type': 'str'},
'data_location': {'key': 'dataLocation', 'type': 'AetherDataLocation'},
'scripting_runtime_id': {'key': 'scriptingRuntimeId', 'type': 'str'},
'interface_documentation': {'key': 'interfaceDocumentation', 'type': 'AetherEntityInterfaceDocumentation'},
'is_eyes_on': {'key': 'isEyesOn', 'type': 'bool'},
'compliance_cluster': {'key': 'complianceCluster', 'type': 'str'},
'is_deterministic': {'key': 'isDeterministic', 'type': 'bool'},
'information_url': {'key': 'informationUrl', 'type': 'str'},
'is_experiment_id_in_parameters': {'key': 'isExperimentIdInParameters', 'type': 'bool'},
'interface_string': {'key': 'interfaceString', 'type': 'str'},
'default_parameters': {'key': 'defaultParameters', 'type': '{str}'},
'structured_interface': {'key': 'structuredInterface', 'type': 'AetherStructuredInterface'},
'family_id': {'key': 'familyId', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'hash': {'key': 'hash', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'sequence_number_in_family': {'key': 'sequenceNumberInFamily', 'type': 'int'},
'owner': {'key': 'owner', 'type': 'str'},
'azure_tenant_id': {'key': 'azureTenantId', 'type': 'str'},
'azure_user_id': {'key': 'azureUserId', 'type': 'str'},
'collaborators': {'key': 'collaborators', 'type': '[str]'},
'id': {'key': 'id', 'type': 'str'},
'workspace_id': {'key': 'workspaceId', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'tags': {'key': 'tags', 'type': '[str]'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword last_updated_by:
:paramtype last_updated_by: ~flow.models.AetherCreatedBy
:keyword display_name:
:paramtype display_name: str
:keyword module_execution_type:
:paramtype module_execution_type: str
:keyword module_type: Possible values include: "None", "BatchInferencing".
:paramtype module_type: str or ~flow.models.AetherModuleType
:keyword module_type_version:
:paramtype module_type_version: str
:keyword resource_requirements:
:paramtype resource_requirements: ~flow.models.AetherResourceModel
:keyword machine_cluster:
:paramtype machine_cluster: list[str]
:keyword default_compliance_cluster:
:paramtype default_compliance_cluster: str
:keyword repository_type: Possible values include: "None", "Other", "Git", "SourceDepot",
"Cosmos".
:paramtype repository_type: str or ~flow.models.AetherRepositoryType
:keyword relative_path_to_source_code:
:paramtype relative_path_to_source_code: str
:keyword commit_id:
:paramtype commit_id: str
:keyword code_review_link:
:paramtype code_review_link: str
:keyword unit_tests_available:
:paramtype unit_tests_available: bool
:keyword is_compressed:
:paramtype is_compressed: bool
:keyword execution_environment: Possible values include: "ExeWorkerMachine",
"DockerContainerWithoutNetwork", "DockerContainerWithNetwork", "HyperVWithoutNetwork",
"HyperVWithNetwork".
:paramtype execution_environment: str or ~flow.models.AetherExecutionEnvironment
:keyword is_output_markup_enabled:
:paramtype is_output_markup_enabled: bool
:keyword docker_image_id:
:paramtype docker_image_id: str
:keyword docker_image_reference:
:paramtype docker_image_reference: str
:keyword docker_image_security_groups:
:paramtype docker_image_security_groups: str
:keyword extended_properties:
:paramtype extended_properties: ~flow.models.AetherModuleExtendedProperties
:keyword deployment_source: Possible values include: "Client", "AutoDeployment", "Vsts".
:paramtype deployment_source: str or ~flow.models.AetherModuleDeploymentSource
:keyword deployment_source_metadata:
:paramtype deployment_source_metadata: str
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword identifier_hash_v2:
:paramtype identifier_hash_v2: str
:keyword kv_tags: This is a dictionary.
:paramtype kv_tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword created_by:
:paramtype created_by: ~flow.models.AetherCreatedBy
:keyword runconfig:
:paramtype runconfig: str
:keyword cloud_settings:
:paramtype cloud_settings: ~flow.models.AetherCloudSettings
:keyword category:
:paramtype category: str
:keyword step_type:
:paramtype step_type: str
:keyword stage:
:paramtype stage: str
:keyword upload_state: Possible values include: "Uploading", "Completed", "Canceled", "Failed".
:paramtype upload_state: str or ~flow.models.AetherUploadState
:keyword source_code_location:
:paramtype source_code_location: str
:keyword size_in_bytes:
:paramtype size_in_bytes: long
:keyword download_location:
:paramtype download_location: str
:keyword data_location:
:paramtype data_location: ~flow.models.AetherDataLocation
:keyword scripting_runtime_id:
:paramtype scripting_runtime_id: str
:keyword interface_documentation:
:paramtype interface_documentation: ~flow.models.AetherEntityInterfaceDocumentation
:keyword is_eyes_on:
:paramtype is_eyes_on: bool
:keyword compliance_cluster:
:paramtype compliance_cluster: str
:keyword is_deterministic:
:paramtype is_deterministic: bool
:keyword information_url:
:paramtype information_url: str
:keyword is_experiment_id_in_parameters:
:paramtype is_experiment_id_in_parameters: bool
:keyword interface_string:
:paramtype interface_string: str
:keyword default_parameters: This is a dictionary.
:paramtype default_parameters: dict[str, str]
:keyword structured_interface:
:paramtype structured_interface: ~flow.models.AetherStructuredInterface
:keyword family_id:
:paramtype family_id: str
:keyword name:
:paramtype name: str
:keyword hash:
:paramtype hash: str
:keyword description:
:paramtype description: str
:keyword version:
:paramtype version: str
:keyword sequence_number_in_family:
:paramtype sequence_number_in_family: int
:keyword owner:
:paramtype owner: str
:keyword azure_tenant_id:
:paramtype azure_tenant_id: str
:keyword azure_user_id:
:paramtype azure_user_id: str
:keyword collaborators:
:paramtype collaborators: list[str]
:keyword id:
:paramtype id: str
:keyword workspace_id:
:paramtype workspace_id: str
:keyword etag:
:paramtype etag: str
:keyword tags: A set of tags.
:paramtype tags: list[str]
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.AetherEntityStatus
"""
super(AetherModuleEntity, self).__init__(**kwargs)
self.last_updated_by = kwargs.get('last_updated_by', None)
self.display_name = kwargs.get('display_name', None)
self.module_execution_type = kwargs.get('module_execution_type', None)
self.module_type = kwargs.get('module_type', None)
self.module_type_version = kwargs.get('module_type_version', None)
self.resource_requirements = kwargs.get('resource_requirements', None)
self.machine_cluster = kwargs.get('machine_cluster', None)
self.default_compliance_cluster = kwargs.get('default_compliance_cluster', None)
self.repository_type = kwargs.get('repository_type', None)
self.relative_path_to_source_code = kwargs.get('relative_path_to_source_code', None)
self.commit_id = kwargs.get('commit_id', None)
self.code_review_link = kwargs.get('code_review_link', None)
self.unit_tests_available = kwargs.get('unit_tests_available', None)
self.is_compressed = kwargs.get('is_compressed', None)
self.execution_environment = kwargs.get('execution_environment', None)
self.is_output_markup_enabled = kwargs.get('is_output_markup_enabled', None)
self.docker_image_id = kwargs.get('docker_image_id', None)
self.docker_image_reference = kwargs.get('docker_image_reference', None)
self.docker_image_security_groups = kwargs.get('docker_image_security_groups', None)
self.extended_properties = kwargs.get('extended_properties', None)
self.deployment_source = kwargs.get('deployment_source', None)
self.deployment_source_metadata = kwargs.get('deployment_source_metadata', None)
self.identifier_hash = kwargs.get('identifier_hash', None)
self.identifier_hash_v2 = kwargs.get('identifier_hash_v2', None)
self.kv_tags = kwargs.get('kv_tags', None)
self.properties = kwargs.get('properties', None)
self.created_by = kwargs.get('created_by', None)
self.runconfig = kwargs.get('runconfig', None)
self.cloud_settings = kwargs.get('cloud_settings', None)
self.category = kwargs.get('category', None)
self.step_type = kwargs.get('step_type', None)
self.stage = kwargs.get('stage', None)
self.upload_state = kwargs.get('upload_state', None)
self.source_code_location = kwargs.get('source_code_location', None)
self.size_in_bytes = kwargs.get('size_in_bytes', None)
self.download_location = kwargs.get('download_location', None)
self.data_location = kwargs.get('data_location', None)
self.scripting_runtime_id = kwargs.get('scripting_runtime_id', None)
self.interface_documentation = kwargs.get('interface_documentation', None)
self.is_eyes_on = kwargs.get('is_eyes_on', None)
self.compliance_cluster = kwargs.get('compliance_cluster', None)
self.is_deterministic = kwargs.get('is_deterministic', None)
self.information_url = kwargs.get('information_url', None)
self.is_experiment_id_in_parameters = kwargs.get('is_experiment_id_in_parameters', None)
self.interface_string = kwargs.get('interface_string', None)
self.default_parameters = kwargs.get('default_parameters', None)
self.structured_interface = kwargs.get('structured_interface', None)
self.family_id = kwargs.get('family_id', None)
self.name = kwargs.get('name', None)
self.hash = kwargs.get('hash', None)
self.description = kwargs.get('description', None)
self.version = kwargs.get('version', None)
self.sequence_number_in_family = kwargs.get('sequence_number_in_family', None)
self.owner = kwargs.get('owner', None)
self.azure_tenant_id = kwargs.get('azure_tenant_id', None)
self.azure_user_id = kwargs.get('azure_user_id', None)
self.collaborators = kwargs.get('collaborators', None)
self.id = kwargs.get('id', None)
self.workspace_id = kwargs.get('workspace_id', None)
self.etag = kwargs.get('etag', None)
self.tags = kwargs.get('tags', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
self.entity_status = kwargs.get('entity_status', None)
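

# Illustrative sketch, not part of the generated contract: a minimal module
# entity, serialized to its wire format with serialize() from the msrest base
# Model. All field values are hypothetical; 'Active' is one of the documented
# entity_status values.
def _example_build_module_entity():
    module = AetherModuleEntity(
        name='train-module',
        display_name='Train Module',
        version='1.0.0',
        is_deterministic=True,
        kv_tags={'team': 'example'},
        entity_status='Active',
    )
    # serialize() maps snake_case attributes to the camelCase keys declared in
    # _attribute_map, e.g. display_name -> displayName.
    return module.serialize()
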
class AetherModuleExtendedProperties(msrest.serialization.Model):
"""AetherModuleExtendedProperties.
:ivar auto_deployed_artifact:
:vartype auto_deployed_artifact: ~flow.models.AetherBuildArtifactInfo
:ivar script_needs_approval:
:vartype script_needs_approval: bool
"""
_attribute_map = {
'auto_deployed_artifact': {'key': 'autoDeployedArtifact', 'type': 'AetherBuildArtifactInfo'},
'script_needs_approval': {'key': 'scriptNeedsApproval', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword auto_deployed_artifact:
:paramtype auto_deployed_artifact: ~flow.models.AetherBuildArtifactInfo
:keyword script_needs_approval:
:paramtype script_needs_approval: bool
"""
super(AetherModuleExtendedProperties, self).__init__(**kwargs)
self.auto_deployed_artifact = kwargs.get('auto_deployed_artifact', None)
        self.script_needs_approval = kwargs.get('script_needs_approval', None)


class AetherNCrossValidations(msrest.serialization.Model):
"""AetherNCrossValidations.
:ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.AetherNCrossValidationMode
:ivar value:
:vartype value: int
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'value': {'key': 'value', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom".
:paramtype mode: str or ~flow.models.AetherNCrossValidationMode
:keyword value:
:paramtype value: int
"""
super(AetherNCrossValidations, self).__init__(**kwargs)
self.mode = kwargs.get('mode', None)
self.value = kwargs.get('value', None)
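

# Illustrative sketch, not part of the generated contract: per the documented
# modes, 'Auto' lets the service choose the fold count, while 'Custom' pairs
# the mode with an explicit value as below.
def _example_build_n_cross_validations():
    return AetherNCrossValidations(mode='Custom', value=5)
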
class AetherOutputSetting(msrest.serialization.Model):
"""AetherOutputSetting.
:ivar name:
:vartype name: str
:ivar data_store_name:
:vartype data_store_name: str
:ivar data_store_name_parameter_assignment:
:vartype data_store_name_parameter_assignment: ~flow.models.AetherParameterAssignment
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AetherDataStoreMode
:ivar data_store_mode_parameter_assignment:
:vartype data_store_mode_parameter_assignment: ~flow.models.AetherParameterAssignment
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar path_on_compute_parameter_assignment:
:vartype path_on_compute_parameter_assignment: ~flow.models.AetherParameterAssignment
:ivar overwrite:
:vartype overwrite: bool
:ivar data_reference_name:
:vartype data_reference_name: str
:ivar web_service_port:
:vartype web_service_port: str
:ivar dataset_registration:
:vartype dataset_registration: ~flow.models.AetherDatasetRegistration
:ivar dataset_output_options:
:vartype dataset_output_options: ~flow.models.AetherDatasetOutputOptions
:ivar asset_output_settings:
:vartype asset_output_settings: ~flow.models.AetherAssetOutputSettings
:ivar parameter_name:
:vartype parameter_name: str
:ivar asset_output_settings_parameter_name:
:vartype asset_output_settings_parameter_name: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'data_store_name_parameter_assignment': {'key': 'DataStoreNameParameterAssignment', 'type': 'AetherParameterAssignment'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'data_store_mode_parameter_assignment': {'key': 'DataStoreModeParameterAssignment', 'type': 'AetherParameterAssignment'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'path_on_compute_parameter_assignment': {'key': 'PathOnComputeParameterAssignment', 'type': 'AetherParameterAssignment'},
'overwrite': {'key': 'overwrite', 'type': 'bool'},
'data_reference_name': {'key': 'dataReferenceName', 'type': 'str'},
'web_service_port': {'key': 'webServicePort', 'type': 'str'},
'dataset_registration': {'key': 'datasetRegistration', 'type': 'AetherDatasetRegistration'},
'dataset_output_options': {'key': 'datasetOutputOptions', 'type': 'AetherDatasetOutputOptions'},
'asset_output_settings': {'key': 'AssetOutputSettings', 'type': 'AetherAssetOutputSettings'},
'parameter_name': {'key': 'parameterName', 'type': 'str'},
'asset_output_settings_parameter_name': {'key': 'AssetOutputSettingsParameterName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword data_store_name:
:paramtype data_store_name: str
:keyword data_store_name_parameter_assignment:
:paramtype data_store_name_parameter_assignment: ~flow.models.AetherParameterAssignment
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AetherDataStoreMode
:keyword data_store_mode_parameter_assignment:
:paramtype data_store_mode_parameter_assignment: ~flow.models.AetherParameterAssignment
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword path_on_compute_parameter_assignment:
:paramtype path_on_compute_parameter_assignment: ~flow.models.AetherParameterAssignment
:keyword overwrite:
:paramtype overwrite: bool
:keyword data_reference_name:
:paramtype data_reference_name: str
:keyword web_service_port:
:paramtype web_service_port: str
:keyword dataset_registration:
:paramtype dataset_registration: ~flow.models.AetherDatasetRegistration
:keyword dataset_output_options:
:paramtype dataset_output_options: ~flow.models.AetherDatasetOutputOptions
:keyword asset_output_settings:
:paramtype asset_output_settings: ~flow.models.AetherAssetOutputSettings
:keyword parameter_name:
:paramtype parameter_name: str
:keyword asset_output_settings_parameter_name:
:paramtype asset_output_settings_parameter_name: str
"""
super(AetherOutputSetting, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.data_store_name = kwargs.get('data_store_name', None)
self.data_store_name_parameter_assignment = kwargs.get('data_store_name_parameter_assignment', None)
self.data_store_mode = kwargs.get('data_store_mode', None)
self.data_store_mode_parameter_assignment = kwargs.get('data_store_mode_parameter_assignment', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
self.path_on_compute_parameter_assignment = kwargs.get('path_on_compute_parameter_assignment', None)
self.overwrite = kwargs.get('overwrite', None)
self.data_reference_name = kwargs.get('data_reference_name', None)
self.web_service_port = kwargs.get('web_service_port', None)
self.dataset_registration = kwargs.get('dataset_registration', None)
self.dataset_output_options = kwargs.get('dataset_output_options', None)
self.asset_output_settings = kwargs.get('asset_output_settings', None)
self.parameter_name = kwargs.get('parameter_name', None)
self.asset_output_settings_parameter_name = kwargs.get('asset_output_settings_parameter_name', None)
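

# Illustrative sketch, not part of the generated contract: uploading an output
# to a named datastore and overwriting previous content. 'Upload' is one of the
# documented data-store modes; the names and path are hypothetical.
def _example_build_output_setting():
    return AetherOutputSetting(
        name='model_output',
        data_store_name='workspaceblobstore',
        data_store_mode='Upload',
        path_on_compute='/mnt/outputs/model',
        overwrite=True,
        data_reference_name='model_output_ref',
    )
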
class AetherParallelForControlFlowInfo(msrest.serialization.Model):
"""AetherParallelForControlFlowInfo.
:ivar parallel_for_items_input:
:vartype parallel_for_items_input: ~flow.models.AetherParameterAssignment
"""
_attribute_map = {
'parallel_for_items_input': {'key': 'parallelForItemsInput', 'type': 'AetherParameterAssignment'},
}
def __init__(
self,
**kwargs
):
"""
:keyword parallel_for_items_input:
:paramtype parallel_for_items_input: ~flow.models.AetherParameterAssignment
"""
super(AetherParallelForControlFlowInfo, self).__init__(**kwargs)
        self.parallel_for_items_input = kwargs.get('parallel_for_items_input', None)


class AetherParameterAssignment(msrest.serialization.Model):
"""AetherParameterAssignment.
:ivar value_type: Possible values include: "Literal", "GraphParameterName", "Concatenate",
"Input", "DataPath", "DataSetDefinition".
:vartype value_type: str or ~flow.models.AetherParameterValueType
:ivar assignments_to_concatenate:
:vartype assignments_to_concatenate: list[~flow.models.AetherParameterAssignment]
:ivar data_path_assignment:
:vartype data_path_assignment: ~flow.models.AetherLegacyDataPath
:ivar data_set_definition_value_assignment:
:vartype data_set_definition_value_assignment: ~flow.models.AetherDataSetDefinitionValue
:ivar name:
:vartype name: str
:ivar value:
:vartype value: str
"""
_attribute_map = {
'value_type': {'key': 'valueType', 'type': 'str'},
'assignments_to_concatenate': {'key': 'assignmentsToConcatenate', 'type': '[AetherParameterAssignment]'},
'data_path_assignment': {'key': 'dataPathAssignment', 'type': 'AetherLegacyDataPath'},
'data_set_definition_value_assignment': {'key': 'dataSetDefinitionValueAssignment', 'type': 'AetherDataSetDefinitionValue'},
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword value_type: Possible values include: "Literal", "GraphParameterName", "Concatenate",
"Input", "DataPath", "DataSetDefinition".
:paramtype value_type: str or ~flow.models.AetherParameterValueType
:keyword assignments_to_concatenate:
:paramtype assignments_to_concatenate: list[~flow.models.AetherParameterAssignment]
:keyword data_path_assignment:
:paramtype data_path_assignment: ~flow.models.AetherLegacyDataPath
:keyword data_set_definition_value_assignment:
:paramtype data_set_definition_value_assignment: ~flow.models.AetherDataSetDefinitionValue
:keyword name:
:paramtype name: str
:keyword value:
:paramtype value: str
"""
super(AetherParameterAssignment, self).__init__(**kwargs)
self.value_type = kwargs.get('value_type', None)
self.assignments_to_concatenate = kwargs.get('assignments_to_concatenate', None)
self.data_path_assignment = kwargs.get('data_path_assignment', None)
self.data_set_definition_value_assignment = kwargs.get('data_set_definition_value_assignment', None)
self.name = kwargs.get('name', None)
self.value = kwargs.get('value', None)
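
# Editor-added usage sketch (hypothetical values, not generated code): a literal
# assignment and a concatenation of two assignments. The value_type strings come
# from the enum values documented in the class docstring above.
#
#     literal = AetherParameterAssignment(
#         value_type="Literal", name="learning_rate", value="0.01")
#     concatenated = AetherParameterAssignment(
#         value_type="Concatenate",
#         assignments_to_concatenate=[
#             AetherParameterAssignment(value_type="Literal", value="run-"),
#             AetherParameterAssignment(value_type="GraphParameterName", value="run_id"),
#         ])
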
class AetherPhillyHdfsReference(msrest.serialization.Model):
"""AetherPhillyHdfsReference.
:ivar cluster:
:vartype cluster: str
:ivar vc:
:vartype vc: str
:ivar relative_path:
:vartype relative_path: str
"""
_attribute_map = {
'cluster': {'key': 'cluster', 'type': 'str'},
'vc': {'key': 'vc', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword cluster:
:paramtype cluster: str
:keyword vc:
:paramtype vc: str
:keyword relative_path:
:paramtype relative_path: str
"""
super(AetherPhillyHdfsReference, self).__init__(**kwargs)
self.cluster = kwargs.get('cluster', None)
self.vc = kwargs.get('vc', None)
self.relative_path = kwargs.get('relative_path', None)

class AetherPortInfo(msrest.serialization.Model):
"""AetherPortInfo.
:ivar node_id:
:vartype node_id: str
:ivar port_name:
:vartype port_name: str
:ivar graph_port_name:
:vartype graph_port_name: str
:ivar is_parameter:
:vartype is_parameter: bool
:ivar web_service_port:
:vartype web_service_port: str
"""
_attribute_map = {
'node_id': {'key': 'nodeId', 'type': 'str'},
'port_name': {'key': 'portName', 'type': 'str'},
'graph_port_name': {'key': 'graphPortName', 'type': 'str'},
'is_parameter': {'key': 'isParameter', 'type': 'bool'},
'web_service_port': {'key': 'webServicePort', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword node_id:
:paramtype node_id: str
:keyword port_name:
:paramtype port_name: str
:keyword graph_port_name:
:paramtype graph_port_name: str
:keyword is_parameter:
:paramtype is_parameter: bool
:keyword web_service_port:
:paramtype web_service_port: str
"""
super(AetherPortInfo, self).__init__(**kwargs)
self.node_id = kwargs.get('node_id', None)
self.port_name = kwargs.get('port_name', None)
self.graph_port_name = kwargs.get('graph_port_name', None)
self.is_parameter = kwargs.get('is_parameter', None)
self.web_service_port = kwargs.get('web_service_port', None)

class AetherPriorityConfig(msrest.serialization.Model):
"""AetherPriorityConfig.
:ivar job_priority:
:vartype job_priority: int
:ivar is_preemptible:
:vartype is_preemptible: bool
:ivar node_count_set:
:vartype node_count_set: list[int]
:ivar scale_interval:
:vartype scale_interval: int
"""
_attribute_map = {
'job_priority': {'key': 'jobPriority', 'type': 'int'},
'is_preemptible': {'key': 'isPreemptible', 'type': 'bool'},
'node_count_set': {'key': 'nodeCountSet', 'type': '[int]'},
'scale_interval': {'key': 'scaleInterval', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword job_priority:
:paramtype job_priority: int
:keyword is_preemptible:
:paramtype is_preemptible: bool
:keyword node_count_set:
:paramtype node_count_set: list[int]
:keyword scale_interval:
:paramtype scale_interval: int
"""
super(AetherPriorityConfig, self).__init__(**kwargs)
self.job_priority = kwargs.get('job_priority', None)
self.is_preemptible = kwargs.get('is_preemptible', None)
self.node_count_set = kwargs.get('node_count_set', None)
self.scale_interval = kwargs.get('scale_interval', None)
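
# Editor-added usage sketch (hypothetical values): a preemptible job at priority
# 100 that may scale across 1, 2, or 4 nodes. Treating scale_interval as seconds
# is an assumption; the contract only types it as an int.
#
#     priority = AetherPriorityConfig(
#         job_priority=100,
#         is_preemptible=True,
#         node_count_set=[1, 2, 4],
#         scale_interval=300,
#     )
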
class AetherPriorityConfiguration(msrest.serialization.Model):
"""AetherPriorityConfiguration.
:ivar cloud_priority:
:vartype cloud_priority: int
:ivar string_type_priority:
:vartype string_type_priority: str
"""
_attribute_map = {
'cloud_priority': {'key': 'cloudPriority', 'type': 'int'},
'string_type_priority': {'key': 'stringTypePriority', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword cloud_priority:
:paramtype cloud_priority: int
:keyword string_type_priority:
:paramtype string_type_priority: str
"""
super(AetherPriorityConfiguration, self).__init__(**kwargs)
self.cloud_priority = kwargs.get('cloud_priority', None)
self.string_type_priority = kwargs.get('string_type_priority', None)

class AetherRegisteredDataSetReference(msrest.serialization.Model):
"""AetherRegisteredDataSetReference.
:ivar id:
:vartype id: str
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
"""
super(AetherRegisteredDataSetReference, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.name = kwargs.get('name', None)
self.version = kwargs.get('version', None)
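
# Editor-added usage sketch (hypothetical values): referencing a registered
# dataset by name and version; alternatively, only the id field can be set.
#
#     dataset_ref = AetherRegisteredDataSetReference(name="training-data", version="2")
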
class AetherRemoteDockerComputeInfo(msrest.serialization.Model):
"""AetherRemoteDockerComputeInfo.
:ivar address:
:vartype address: str
:ivar username:
:vartype username: str
:ivar password:
:vartype password: str
:ivar private_key:
:vartype private_key: str
"""
_attribute_map = {
'address': {'key': 'address', 'type': 'str'},
'username': {'key': 'username', 'type': 'str'},
'password': {'key': 'password', 'type': 'str'},
'private_key': {'key': 'privateKey', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword address:
:paramtype address: str
:keyword username:
:paramtype username: str
:keyword password:
:paramtype password: str
:keyword private_key:
:paramtype private_key: str
"""
super(AetherRemoteDockerComputeInfo, self).__init__(**kwargs)
self.address = kwargs.get('address', None)
self.username = kwargs.get('username', None)
self.password = kwargs.get('password', None)
self.private_key = kwargs.get('private_key', None)

class AetherResourceAssignment(msrest.serialization.Model):
"""AetherResourceAssignment.
:ivar attributes: Dictionary of :code:`<AetherResourceAttributeAssignment>`.
:vartype attributes: dict[str, ~flow.models.AetherResourceAttributeAssignment]
"""
_attribute_map = {
'attributes': {'key': 'attributes', 'type': '{AetherResourceAttributeAssignment}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword attributes: Dictionary of :code:`<AetherResourceAttributeAssignment>`.
:paramtype attributes: dict[str, ~flow.models.AetherResourceAttributeAssignment]
"""
super(AetherResourceAssignment, self).__init__(**kwargs)
self.attributes = kwargs.get('attributes', None)

class AetherResourceAttributeAssignment(msrest.serialization.Model):
"""AetherResourceAttributeAssignment.
:ivar attribute:
:vartype attribute: ~flow.models.AetherResourceAttributeDefinition
:ivar operator: Possible values include: "Equal", "Contain", "GreaterOrEqual".
:vartype operator: str or ~flow.models.AetherResourceOperator
:ivar value:
:vartype value: str
"""
_attribute_map = {
'attribute': {'key': 'attribute', 'type': 'AetherResourceAttributeDefinition'},
'operator': {'key': 'operator', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword attribute:
:paramtype attribute: ~flow.models.AetherResourceAttributeDefinition
:keyword operator: Possible values include: "Equal", "Contain", "GreaterOrEqual".
:paramtype operator: str or ~flow.models.AetherResourceOperator
:keyword value:
:paramtype value: str
"""
super(AetherResourceAttributeAssignment, self).__init__(**kwargs)
self.attribute = kwargs.get('attribute', None)
self.operator = kwargs.get('operator', None)
self.value = kwargs.get('value', None)

class AetherResourceAttributeDefinition(msrest.serialization.Model):
"""AetherResourceAttributeDefinition.
:ivar name:
:vartype name: str
:ivar type: Possible values include: "String", "Double".
:vartype type: str or ~flow.models.AetherResourceValueType
:ivar units:
:vartype units: str
:ivar allowed_operators:
:vartype allowed_operators: list[str or ~flow.models.AetherResourceOperator]
"""
_validation = {
'allowed_operators': {'unique': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'units': {'key': 'units', 'type': 'str'},
'allowed_operators': {'key': 'allowedOperators', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type: Possible values include: "String", "Double".
:paramtype type: str or ~flow.models.AetherResourceValueType
:keyword units:
:paramtype units: str
:keyword allowed_operators:
:paramtype allowed_operators: list[str or ~flow.models.AetherResourceOperator]
"""
super(AetherResourceAttributeDefinition, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = kwargs.get('type', None)
self.units = kwargs.get('units', None)
self.allowed_operators = kwargs.get('allowed_operators', None)
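
# Editor-added usage sketch (hypothetical values): an attribute definition plus an
# assignment requiring at least 112 GB of memory. Keying the attributes dictionary
# by the attribute name is an assumption; the contract does not document the key.
#
#     memory_attr = AetherResourceAttributeDefinition(
#         name="Memory", type="Double", units="GB",
#         allowed_operators=["GreaterOrEqual"])
#     assignment = AetherResourceAssignment(attributes={
#         "Memory": AetherResourceAttributeAssignment(
#             attribute=memory_attr, operator="GreaterOrEqual", value="112"),
#     })
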
class AetherResourceConfig(msrest.serialization.Model):
"""AetherResourceConfig.
:ivar gpu_count:
:vartype gpu_count: int
:ivar cpu_count:
:vartype cpu_count: int
:ivar memory_request_in_gb:
:vartype memory_request_in_gb: int
"""
_attribute_map = {
'gpu_count': {'key': 'gpuCount', 'type': 'int'},
'cpu_count': {'key': 'cpuCount', 'type': 'int'},
'memory_request_in_gb': {'key': 'memoryRequestInGB', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword gpu_count:
:paramtype gpu_count: int
:keyword cpu_count:
:paramtype cpu_count: int
:keyword memory_request_in_gb:
:paramtype memory_request_in_gb: int
"""
super(AetherResourceConfig, self).__init__(**kwargs)
self.gpu_count = kwargs.get('gpu_count', None)
self.cpu_count = kwargs.get('cpu_count', None)
self.memory_request_in_gb = kwargs.get('memory_request_in_gb', None)
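
# Editor-added usage sketch (hypothetical values): one GPU, four CPU cores, and
# 16 GB of memory, per the GB unit implied by the memoryRequestInGB wire key.
#
#     resources = AetherResourceConfig(gpu_count=1, cpu_count=4, memory_request_in_gb=16)
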
class AetherResourceConfiguration(msrest.serialization.Model):
"""AetherResourceConfiguration.
:ivar instance_count:
:vartype instance_count: int
:ivar instance_type:
:vartype instance_type: str
:ivar properties: Dictionary of :code:`<any>`.
:vartype properties: dict[str, any]
:ivar locations:
:vartype locations: list[str]
:ivar instance_priority:
:vartype instance_priority: str
:ivar quota_enforcement_resource_id:
:vartype quota_enforcement_resource_id: str
"""
_attribute_map = {
'instance_count': {'key': 'instanceCount', 'type': 'int'},
'instance_type': {'key': 'instanceType', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{object}'},
'locations': {'key': 'locations', 'type': '[str]'},
'instance_priority': {'key': 'instancePriority', 'type': 'str'},
'quota_enforcement_resource_id': {'key': 'quotaEnforcementResourceId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword instance_count:
:paramtype instance_count: int
:keyword instance_type:
:paramtype instance_type: str
:keyword properties: Dictionary of :code:`<any>`.
:paramtype properties: dict[str, any]
:keyword locations:
:paramtype locations: list[str]
:keyword instance_priority:
:paramtype instance_priority: str
:keyword quota_enforcement_resource_id:
:paramtype quota_enforcement_resource_id: str
"""
super(AetherResourceConfiguration, self).__init__(**kwargs)
self.instance_count = kwargs.get('instance_count', None)
self.instance_type = kwargs.get('instance_type', None)
self.properties = kwargs.get('properties', None)
self.locations = kwargs.get('locations', None)
self.instance_priority = kwargs.get('instance_priority', None)
self.quota_enforcement_resource_id = kwargs.get('quota_enforcement_resource_id', None)
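
# Editor-added usage sketch (hypothetical values): two instances of an assumed VM
# size. instance_priority is a free-form string here; "Dedicated" is an assumed
# value, not one enumerated by this contract.
#
#     config = AetherResourceConfiguration(
#         instance_count=2,
#         instance_type="Standard_DS3_v2",
#         instance_priority="Dedicated",
#     )
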
class AetherResourceModel(msrest.serialization.Model):
"""AetherResourceModel.
:ivar resources:
:vartype resources: list[~flow.models.AetherResourceAssignment]
"""
_attribute_map = {
'resources': {'key': 'resources', 'type': '[AetherResourceAssignment]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword resources:
:paramtype resources: list[~flow.models.AetherResourceAssignment]
"""
super(AetherResourceModel, self).__init__(**kwargs)
self.resources = kwargs.get('resources', None)

class AetherResourcesSetting(msrest.serialization.Model):
"""AetherResourcesSetting.
:ivar instance_size:
:vartype instance_size: str
:ivar spark_version:
:vartype spark_version: str
"""
_attribute_map = {
'instance_size': {'key': 'instanceSize', 'type': 'str'},
'spark_version': {'key': 'sparkVersion', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword instance_size:
:paramtype instance_size: str
:keyword spark_version:
:paramtype spark_version: str
"""
super(AetherResourcesSetting, self).__init__(**kwargs)
self.instance_size = kwargs.get('instance_size', None)
self.spark_version = kwargs.get('spark_version', None)

class AetherSavedDataSetReference(msrest.serialization.Model):
"""AetherSavedDataSetReference.
:ivar id:
:vartype id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
"""
super(AetherSavedDataSetReference, self).__init__(**kwargs)
self.id = kwargs.get('id', None)

class AetherScopeCloudConfiguration(msrest.serialization.Model):
"""AetherScopeCloudConfiguration.
:ivar input_path_suffixes: This is a dictionary.
:vartype input_path_suffixes: dict[str, ~flow.models.AetherArgumentAssignment]
:ivar output_path_suffixes: This is a dictionary.
:vartype output_path_suffixes: dict[str, ~flow.models.AetherArgumentAssignment]
:ivar user_alias:
:vartype user_alias: str
:ivar tokens:
:vartype tokens: int
:ivar auto_token:
:vartype auto_token: int
:ivar vcp:
:vartype vcp: float
"""
_attribute_map = {
'input_path_suffixes': {'key': 'inputPathSuffixes', 'type': '{AetherArgumentAssignment}'},
'output_path_suffixes': {'key': 'outputPathSuffixes', 'type': '{AetherArgumentAssignment}'},
'user_alias': {'key': 'userAlias', 'type': 'str'},
'tokens': {'key': 'tokens', 'type': 'int'},
'auto_token': {'key': 'autoToken', 'type': 'int'},
'vcp': {'key': 'vcp', 'type': 'float'},
}
def __init__(
self,
**kwargs
):
"""
:keyword input_path_suffixes: This is a dictionary.
:paramtype input_path_suffixes: dict[str, ~flow.models.AetherArgumentAssignment]
:keyword output_path_suffixes: This is a dictionary.
:paramtype output_path_suffixes: dict[str, ~flow.models.AetherArgumentAssignment]
:keyword user_alias:
:paramtype user_alias: str
:keyword tokens:
:paramtype tokens: int
:keyword auto_token:
:paramtype auto_token: int
:keyword vcp:
:paramtype vcp: float
"""
super(AetherScopeCloudConfiguration, self).__init__(**kwargs)
self.input_path_suffixes = kwargs.get('input_path_suffixes', None)
self.output_path_suffixes = kwargs.get('output_path_suffixes', None)
self.user_alias = kwargs.get('user_alias', None)
self.tokens = kwargs.get('tokens', None)
self.auto_token = kwargs.get('auto_token', None)
self.vcp = kwargs.get('vcp', None)

class AetherSeasonality(msrest.serialization.Model):
"""AetherSeasonality.
:ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.AetherSeasonalityMode
:ivar value:
:vartype value: int
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'value': {'key': 'value', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom".
:paramtype mode: str or ~flow.models.AetherSeasonalityMode
:keyword value:
:paramtype value: int
"""
super(AetherSeasonality, self).__init__(**kwargs)
self.mode = kwargs.get('mode', None)
self.value = kwargs.get('value', None)

class AetherSqlDataPath(msrest.serialization.Model):
"""AetherSqlDataPath.
:ivar sql_table_name:
:vartype sql_table_name: str
:ivar sql_query:
:vartype sql_query: str
:ivar sql_stored_procedure_name:
:vartype sql_stored_procedure_name: str
:ivar sql_stored_procedure_params:
:vartype sql_stored_procedure_params: list[~flow.models.AetherStoredProcedureParameter]
"""
_attribute_map = {
'sql_table_name': {'key': 'sqlTableName', 'type': 'str'},
'sql_query': {'key': 'sqlQuery', 'type': 'str'},
'sql_stored_procedure_name': {'key': 'sqlStoredProcedureName', 'type': 'str'},
'sql_stored_procedure_params': {'key': 'sqlStoredProcedureParams', 'type': '[AetherStoredProcedureParameter]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword sql_table_name:
:paramtype sql_table_name: str
:keyword sql_query:
:paramtype sql_query: str
:keyword sql_stored_procedure_name:
:paramtype sql_stored_procedure_name: str
:keyword sql_stored_procedure_params:
:paramtype sql_stored_procedure_params: list[~flow.models.AetherStoredProcedureParameter]
"""
super(AetherSqlDataPath, self).__init__(**kwargs)
self.sql_table_name = kwargs.get('sql_table_name', None)
self.sql_query = kwargs.get('sql_query', None)
self.sql_stored_procedure_name = kwargs.get('sql_stored_procedure_name', None)
self.sql_stored_procedure_params = kwargs.get('sql_stored_procedure_params', None)
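
# Editor-added usage sketch (hypothetical values): invoking a stored procedure
# with one typed parameter. AetherStoredProcedureParameter is defined further
# down in this module; the ordering is irrelevant inside a comment.
#
#     sql_path = AetherSqlDataPath(
#         sql_stored_procedure_name="dbo.GetTrainingRows",
#         sql_stored_procedure_params=[
#             AetherStoredProcedureParameter(name="maxRows", value="1000", type="Int"),
#         ])
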
class AetherStackEnsembleSettings(msrest.serialization.Model):
"""AetherStackEnsembleSettings.
:ivar stack_meta_learner_type: Possible values include: "None", "LogisticRegression",
"LogisticRegressionCV", "LightGBMClassifier", "ElasticNet", "ElasticNetCV",
"LightGBMRegressor", "LinearRegression".
:vartype stack_meta_learner_type: str or ~flow.models.AetherStackMetaLearnerType
:ivar stack_meta_learner_train_percentage:
:vartype stack_meta_learner_train_percentage: float
:ivar stack_meta_learner_k_wargs: Anything.
:vartype stack_meta_learner_k_wargs: any
"""
_attribute_map = {
'stack_meta_learner_type': {'key': 'stackMetaLearnerType', 'type': 'str'},
'stack_meta_learner_train_percentage': {'key': 'stackMetaLearnerTrainPercentage', 'type': 'float'},
'stack_meta_learner_k_wargs': {'key': 'stackMetaLearnerKWargs', 'type': 'object'},
}
def __init__(
self,
**kwargs
):
"""
:keyword stack_meta_learner_type: Possible values include: "None", "LogisticRegression",
"LogisticRegressionCV", "LightGBMClassifier", "ElasticNet", "ElasticNetCV",
"LightGBMRegressor", "LinearRegression".
:paramtype stack_meta_learner_type: str or ~flow.models.AetherStackMetaLearnerType
:keyword stack_meta_learner_train_percentage:
:paramtype stack_meta_learner_train_percentage: float
:keyword stack_meta_learner_k_wargs: Anything.
:paramtype stack_meta_learner_k_wargs: any
"""
super(AetherStackEnsembleSettings, self).__init__(**kwargs)
self.stack_meta_learner_type = kwargs.get('stack_meta_learner_type', None)
self.stack_meta_learner_train_percentage = kwargs.get('stack_meta_learner_train_percentage', None)
self.stack_meta_learner_k_wargs = kwargs.get('stack_meta_learner_k_wargs', None)

class AetherStoredProcedureParameter(msrest.serialization.Model):
"""AetherStoredProcedureParameter.
:ivar name:
:vartype name: str
:ivar value:
:vartype value: str
:ivar type: Possible values include: "String", "Int", "Decimal", "Guid", "Boolean", "Date".
:vartype type: str or ~flow.models.AetherStoredProcedureParameterType
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword value:
:paramtype value: str
:keyword type: Possible values include: "String", "Int", "Decimal", "Guid", "Boolean", "Date".
:paramtype type: str or ~flow.models.AetherStoredProcedureParameterType
"""
super(AetherStoredProcedureParameter, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.value = kwargs.get('value', None)
self.type = kwargs.get('type', None)

class AetherStructuredInterface(msrest.serialization.Model):
"""AetherStructuredInterface.
:ivar command_line_pattern:
:vartype command_line_pattern: str
:ivar inputs:
:vartype inputs: list[~flow.models.AetherStructuredInterfaceInput]
:ivar outputs:
:vartype outputs: list[~flow.models.AetherStructuredInterfaceOutput]
:ivar control_outputs:
:vartype control_outputs: list[~flow.models.AetherControlOutput]
:ivar parameters:
:vartype parameters: list[~flow.models.AetherStructuredInterfaceParameter]
:ivar metadata_parameters:
:vartype metadata_parameters: list[~flow.models.AetherStructuredInterfaceParameter]
:ivar arguments:
:vartype arguments: list[~flow.models.AetherArgumentAssignment]
"""
_attribute_map = {
'command_line_pattern': {'key': 'commandLinePattern', 'type': 'str'},
'inputs': {'key': 'inputs', 'type': '[AetherStructuredInterfaceInput]'},
'outputs': {'key': 'outputs', 'type': '[AetherStructuredInterfaceOutput]'},
'control_outputs': {'key': 'controlOutputs', 'type': '[AetherControlOutput]'},
'parameters': {'key': 'parameters', 'type': '[AetherStructuredInterfaceParameter]'},
'metadata_parameters': {'key': 'metadataParameters', 'type': '[AetherStructuredInterfaceParameter]'},
'arguments': {'key': 'arguments', 'type': '[AetherArgumentAssignment]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword command_line_pattern:
:paramtype command_line_pattern: str
:keyword inputs:
:paramtype inputs: list[~flow.models.AetherStructuredInterfaceInput]
:keyword outputs:
:paramtype outputs: list[~flow.models.AetherStructuredInterfaceOutput]
:keyword control_outputs:
:paramtype control_outputs: list[~flow.models.AetherControlOutput]
:keyword parameters:
:paramtype parameters: list[~flow.models.AetherStructuredInterfaceParameter]
:keyword metadata_parameters:
:paramtype metadata_parameters: list[~flow.models.AetherStructuredInterfaceParameter]
:keyword arguments:
:paramtype arguments: list[~flow.models.AetherArgumentAssignment]
"""
super(AetherStructuredInterface, self).__init__(**kwargs)
self.command_line_pattern = kwargs.get('command_line_pattern', None)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
self.control_outputs = kwargs.get('control_outputs', None)
self.parameters = kwargs.get('parameters', None)
self.metadata_parameters = kwargs.get('metadata_parameters', None)
self.arguments = kwargs.get('arguments', None)
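
# Editor-added usage sketch (hypothetical values): a minimal interface with one
# input, one output, and one parameter. The input/output/parameter model classes
# are defined below this class; the {name} placeholder syntax assumed in
# command_line_pattern is illustrative only.
#
#     interface = AetherStructuredInterface(
#         command_line_pattern="train --data {input_data} --lr {learning_rate}",
#         inputs=[AetherStructuredInterfaceInput(name="input_data", is_optional=False)],
#         outputs=[AetherStructuredInterfaceOutput(name="model_dir", data_type_id="AnyDirectory")],
#         parameters=[AetherStructuredInterfaceParameter(
#             name="learning_rate", parameter_type="Double", default_value="0.01")],
#     )
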
class AetherStructuredInterfaceInput(msrest.serialization.Model):
"""AetherStructuredInterfaceInput.
:ivar name:
:vartype name: str
:ivar label:
:vartype label: str
:ivar data_type_ids_list:
:vartype data_type_ids_list: list[str]
:ivar is_optional:
:vartype is_optional: bool
:ivar description:
:vartype description: str
:ivar skip_processing:
:vartype skip_processing: bool
:ivar is_resource:
:vartype is_resource: bool
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AetherDataStoreMode
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar overwrite:
:vartype overwrite: bool
:ivar data_reference_name:
:vartype data_reference_name: str
:ivar dataset_types:
:vartype dataset_types: list[str or ~flow.models.AetherDatasetType]
:ivar additional_transformations:
:vartype additional_transformations: str
"""
_validation = {
'dataset_types': {'unique': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'data_type_ids_list': {'key': 'dataTypeIdsList', 'type': '[str]'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'skip_processing': {'key': 'skipProcessing', 'type': 'bool'},
'is_resource': {'key': 'isResource', 'type': 'bool'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'overwrite': {'key': 'overwrite', 'type': 'bool'},
'data_reference_name': {'key': 'dataReferenceName', 'type': 'str'},
'dataset_types': {'key': 'datasetTypes', 'type': '[str]'},
'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword label:
:paramtype label: str
:keyword data_type_ids_list:
:paramtype data_type_ids_list: list[str]
:keyword is_optional:
:paramtype is_optional: bool
:keyword description:
:paramtype description: str
:keyword skip_processing:
:paramtype skip_processing: bool
:keyword is_resource:
:paramtype is_resource: bool
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AetherDataStoreMode
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword overwrite:
:paramtype overwrite: bool
:keyword data_reference_name:
:paramtype data_reference_name: str
:keyword dataset_types:
:paramtype dataset_types: list[str or ~flow.models.AetherDatasetType]
:keyword additional_transformations:
:paramtype additional_transformations: str
"""
super(AetherStructuredInterfaceInput, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.label = kwargs.get('label', None)
self.data_type_ids_list = kwargs.get('data_type_ids_list', None)
self.is_optional = kwargs.get('is_optional', None)
self.description = kwargs.get('description', None)
self.skip_processing = kwargs.get('skip_processing', None)
self.is_resource = kwargs.get('is_resource', None)
self.data_store_mode = kwargs.get('data_store_mode', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
self.overwrite = kwargs.get('overwrite', None)
self.data_reference_name = kwargs.get('data_reference_name', None)
self.dataset_types = kwargs.get('dataset_types', None)
self.additional_transformations = kwargs.get('additional_transformations', None)

class AetherStructuredInterfaceOutput(msrest.serialization.Model):
"""AetherStructuredInterfaceOutput.
:ivar name:
:vartype name: str
:ivar label:
:vartype label: str
:ivar data_type_id:
:vartype data_type_id: str
:ivar pass_through_data_type_input_name:
:vartype pass_through_data_type_input_name: str
:ivar description:
:vartype description: str
:ivar skip_processing:
:vartype skip_processing: bool
:ivar is_artifact:
:vartype is_artifact: bool
:ivar data_store_name:
:vartype data_store_name: str
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AetherDataStoreMode
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar overwrite:
:vartype overwrite: bool
:ivar data_reference_name:
:vartype data_reference_name: str
:ivar training_output:
:vartype training_output: ~flow.models.AetherTrainingOutput
:ivar dataset_output:
:vartype dataset_output: ~flow.models.AetherDatasetOutput
:ivar asset_output_settings:
:vartype asset_output_settings: ~flow.models.AetherAssetOutputSettings
:ivar early_available:
:vartype early_available: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'data_type_id': {'key': 'dataTypeId', 'type': 'str'},
'pass_through_data_type_input_name': {'key': 'passThroughDataTypeInputName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'skip_processing': {'key': 'skipProcessing', 'type': 'bool'},
'is_artifact': {'key': 'isArtifact', 'type': 'bool'},
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'overwrite': {'key': 'overwrite', 'type': 'bool'},
'data_reference_name': {'key': 'dataReferenceName', 'type': 'str'},
'training_output': {'key': 'trainingOutput', 'type': 'AetherTrainingOutput'},
'dataset_output': {'key': 'datasetOutput', 'type': 'AetherDatasetOutput'},
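        # Note: the wire key below is Pascal-cased ('AssetOutputSettings'), unlike the camelCase keys above, as emitted by the generator.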
'asset_output_settings': {'key': 'AssetOutputSettings', 'type': 'AetherAssetOutputSettings'},
'early_available': {'key': 'earlyAvailable', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword label:
:paramtype label: str
:keyword data_type_id:
:paramtype data_type_id: str
:keyword pass_through_data_type_input_name:
:paramtype pass_through_data_type_input_name: str
:keyword description:
:paramtype description: str
:keyword skip_processing:
:paramtype skip_processing: bool
:keyword is_artifact:
:paramtype is_artifact: bool
:keyword data_store_name:
:paramtype data_store_name: str
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AetherDataStoreMode
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword overwrite:
:paramtype overwrite: bool
:keyword data_reference_name:
:paramtype data_reference_name: str
:keyword training_output:
:paramtype training_output: ~flow.models.AetherTrainingOutput
:keyword dataset_output:
:paramtype dataset_output: ~flow.models.AetherDatasetOutput
:keyword asset_output_settings:
:paramtype asset_output_settings: ~flow.models.AetherAssetOutputSettings
:keyword early_available:
:paramtype early_available: bool
"""
super(AetherStructuredInterfaceOutput, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.label = kwargs.get('label', None)
self.data_type_id = kwargs.get('data_type_id', None)
self.pass_through_data_type_input_name = kwargs.get('pass_through_data_type_input_name', None)
self.description = kwargs.get('description', None)
self.skip_processing = kwargs.get('skip_processing', None)
self.is_artifact = kwargs.get('is_artifact', None)
self.data_store_name = kwargs.get('data_store_name', None)
self.data_store_mode = kwargs.get('data_store_mode', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
self.overwrite = kwargs.get('overwrite', None)
self.data_reference_name = kwargs.get('data_reference_name', None)
self.training_output = kwargs.get('training_output', None)
self.dataset_output = kwargs.get('dataset_output', None)
self.asset_output_settings = kwargs.get('asset_output_settings', None)
self.early_available = kwargs.get('early_available', None)

class AetherStructuredInterfaceParameter(msrest.serialization.Model):
"""AetherStructuredInterfaceParameter.
:ivar name:
:vartype name: str
:ivar label:
:vartype label: str
:ivar parameter_type: Possible values include: "Int", "Double", "Bool", "String", "Undefined".
:vartype parameter_type: str or ~flow.models.AetherParameterType
:ivar is_optional:
:vartype is_optional: bool
:ivar default_value:
:vartype default_value: str
:ivar lower_bound:
:vartype lower_bound: str
:ivar upper_bound:
:vartype upper_bound: str
:ivar enum_values:
:vartype enum_values: list[str]
:ivar enum_values_to_argument_strings: This is a dictionary.
:vartype enum_values_to_argument_strings: dict[str, str]
:ivar description:
:vartype description: str
:ivar set_environment_variable:
:vartype set_environment_variable: bool
:ivar environment_variable_override:
:vartype environment_variable_override: str
:ivar enabled_by_parameter_name:
:vartype enabled_by_parameter_name: str
:ivar enabled_by_parameter_values:
:vartype enabled_by_parameter_values: list[str]
:ivar ui_hint:
:vartype ui_hint: ~flow.models.AetherUIParameterHint
:ivar group_names:
:vartype group_names: list[str]
:ivar argument_name:
:vartype argument_name: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'parameter_type': {'key': 'parameterType', 'type': 'str'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
'default_value': {'key': 'defaultValue', 'type': 'str'},
'lower_bound': {'key': 'lowerBound', 'type': 'str'},
'upper_bound': {'key': 'upperBound', 'type': 'str'},
'enum_values': {'key': 'enumValues', 'type': '[str]'},
'enum_values_to_argument_strings': {'key': 'enumValuesToArgumentStrings', 'type': '{str}'},
'description': {'key': 'description', 'type': 'str'},
'set_environment_variable': {'key': 'setEnvironmentVariable', 'type': 'bool'},
'environment_variable_override': {'key': 'environmentVariableOverride', 'type': 'str'},
'enabled_by_parameter_name': {'key': 'enabledByParameterName', 'type': 'str'},
'enabled_by_parameter_values': {'key': 'enabledByParameterValues', 'type': '[str]'},
'ui_hint': {'key': 'uiHint', 'type': 'AetherUIParameterHint'},
'group_names': {'key': 'groupNames', 'type': '[str]'},
'argument_name': {'key': 'argumentName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword label:
:paramtype label: str
:keyword parameter_type: Possible values include: "Int", "Double", "Bool", "String",
"Undefined".
:paramtype parameter_type: str or ~flow.models.AetherParameterType
:keyword is_optional:
:paramtype is_optional: bool
:keyword default_value:
:paramtype default_value: str
:keyword lower_bound:
:paramtype lower_bound: str
:keyword upper_bound:
:paramtype upper_bound: str
:keyword enum_values:
:paramtype enum_values: list[str]
:keyword enum_values_to_argument_strings: This is a dictionary.
:paramtype enum_values_to_argument_strings: dict[str, str]
:keyword description:
:paramtype description: str
:keyword set_environment_variable:
:paramtype set_environment_variable: bool
:keyword environment_variable_override:
:paramtype environment_variable_override: str
:keyword enabled_by_parameter_name:
:paramtype enabled_by_parameter_name: str
:keyword enabled_by_parameter_values:
:paramtype enabled_by_parameter_values: list[str]
:keyword ui_hint:
:paramtype ui_hint: ~flow.models.AetherUIParameterHint
:keyword group_names:
:paramtype group_names: list[str]
:keyword argument_name:
:paramtype argument_name: str
"""
super(AetherStructuredInterfaceParameter, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.label = kwargs.get('label', None)
self.parameter_type = kwargs.get('parameter_type', None)
self.is_optional = kwargs.get('is_optional', None)
self.default_value = kwargs.get('default_value', None)
self.lower_bound = kwargs.get('lower_bound', None)
self.upper_bound = kwargs.get('upper_bound', None)
self.enum_values = kwargs.get('enum_values', None)
self.enum_values_to_argument_strings = kwargs.get('enum_values_to_argument_strings', None)
self.description = kwargs.get('description', None)
self.set_environment_variable = kwargs.get('set_environment_variable', None)
self.environment_variable_override = kwargs.get('environment_variable_override', None)
self.enabled_by_parameter_name = kwargs.get('enabled_by_parameter_name', None)
self.enabled_by_parameter_values = kwargs.get('enabled_by_parameter_values', None)
self.ui_hint = kwargs.get('ui_hint', None)
self.group_names = kwargs.get('group_names', None)
self.argument_name = kwargs.get('argument_name', None)

class AetherSubGraphConfiguration(msrest.serialization.Model):
"""AetherSubGraphConfiguration.
:ivar graph_id:
:vartype graph_id: str
:ivar graph_draft_id:
:vartype graph_draft_id: str
:ivar default_compute_internal:
:vartype default_compute_internal: ~flow.models.AetherComputeSetting
:ivar default_datastore_internal:
:vartype default_datastore_internal: ~flow.models.AetherDatastoreSetting
:ivar default_cloud_priority:
:vartype default_cloud_priority: ~flow.models.AetherCloudPrioritySetting
:ivar user_alias:
:vartype user_alias: str
:ivar is_dynamic:
:vartype is_dynamic: bool
"""
_attribute_map = {
'graph_id': {'key': 'graphId', 'type': 'str'},
'graph_draft_id': {'key': 'graphDraftId', 'type': 'str'},
'default_compute_internal': {'key': 'defaultComputeInternal', 'type': 'AetherComputeSetting'},
'default_datastore_internal': {'key': 'defaultDatastoreInternal', 'type': 'AetherDatastoreSetting'},
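        # Note: the three wire keys below are Pascal-cased, as emitted by the generator.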
'default_cloud_priority': {'key': 'DefaultCloudPriority', 'type': 'AetherCloudPrioritySetting'},
'user_alias': {'key': 'UserAlias', 'type': 'str'},
'is_dynamic': {'key': 'IsDynamic', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword graph_id:
:paramtype graph_id: str
:keyword graph_draft_id:
:paramtype graph_draft_id: str
:keyword default_compute_internal:
:paramtype default_compute_internal: ~flow.models.AetherComputeSetting
:keyword default_datastore_internal:
:paramtype default_datastore_internal: ~flow.models.AetherDatastoreSetting
:keyword default_cloud_priority:
:paramtype default_cloud_priority: ~flow.models.AetherCloudPrioritySetting
:keyword user_alias:
:paramtype user_alias: str
:keyword is_dynamic:
:paramtype is_dynamic: bool
"""
super(AetherSubGraphConfiguration, self).__init__(**kwargs)
self.graph_id = kwargs.get('graph_id', None)
self.graph_draft_id = kwargs.get('graph_draft_id', None)
self.default_compute_internal = kwargs.get('default_compute_internal', None)
self.default_datastore_internal = kwargs.get('default_datastore_internal', None)
self.default_cloud_priority = kwargs.get('default_cloud_priority', None)
self.user_alias = kwargs.get('user_alias', None)
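        # Note: unlike the other optional fields in this module, is_dynamic defaults to False rather than None.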
self.is_dynamic = kwargs.get('is_dynamic', False)

class AetherSweepEarlyTerminationPolicy(msrest.serialization.Model):
"""AetherSweepEarlyTerminationPolicy.
:ivar policy_type: Possible values include: "Bandit", "MedianStopping", "TruncationSelection".
:vartype policy_type: str or ~flow.models.AetherEarlyTerminationPolicyType
:ivar evaluation_interval:
:vartype evaluation_interval: int
:ivar delay_evaluation:
:vartype delay_evaluation: int
:ivar slack_factor:
:vartype slack_factor: float
:ivar slack_amount:
:vartype slack_amount: float
:ivar truncation_percentage:
:vartype truncation_percentage: int
"""
_attribute_map = {
'policy_type': {'key': 'policyType', 'type': 'str'},
'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'},
'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'},
'slack_factor': {'key': 'slackFactor', 'type': 'float'},
'slack_amount': {'key': 'slackAmount', 'type': 'float'},
'truncation_percentage': {'key': 'truncationPercentage', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword policy_type: Possible values include: "Bandit", "MedianStopping",
"TruncationSelection".
:paramtype policy_type: str or ~flow.models.AetherEarlyTerminationPolicyType
:keyword evaluation_interval:
:paramtype evaluation_interval: int
:keyword delay_evaluation:
:paramtype delay_evaluation: int
:keyword slack_factor:
:paramtype slack_factor: float
:keyword slack_amount:
:paramtype slack_amount: float
:keyword truncation_percentage:
:paramtype truncation_percentage: int
"""
super(AetherSweepEarlyTerminationPolicy, self).__init__(**kwargs)
self.policy_type = kwargs.get('policy_type', None)
self.evaluation_interval = kwargs.get('evaluation_interval', None)
self.delay_evaluation = kwargs.get('delay_evaluation', None)
self.slack_factor = kwargs.get('slack_factor', None)
self.slack_amount = kwargs.get('slack_amount', None)
self.truncation_percentage = kwargs.get('truncation_percentage', None)
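
# Editor-added usage sketch (hypothetical values): a Bandit policy with a 10%
# slack factor, evaluated every interval after a delay of 5 evaluations.
# slack_factor and slack_amount would normally be alternatives, not set together.
#
#     bandit = AetherSweepEarlyTerminationPolicy(
#         policy_type="Bandit",
#         evaluation_interval=1,
#         delay_evaluation=5,
#         slack_factor=0.1,
#     )
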
class AetherSweepSettings(msrest.serialization.Model):
"""AetherSweepSettings.
:ivar limits:
:vartype limits: ~flow.models.AetherSweepSettingsLimits
:ivar search_space:
:vartype search_space: list[dict[str, str]]
:ivar sampling_algorithm: Possible values include: "Random", "Grid", "Bayesian".
:vartype sampling_algorithm: str or ~flow.models.AetherSamplingAlgorithmType
:ivar early_termination:
:vartype early_termination: ~flow.models.AetherSweepEarlyTerminationPolicy
"""
_attribute_map = {
'limits': {'key': 'limits', 'type': 'AetherSweepSettingsLimits'},
'search_space': {'key': 'searchSpace', 'type': '[{str}]'},
'sampling_algorithm': {'key': 'samplingAlgorithm', 'type': 'str'},
'early_termination': {'key': 'earlyTermination', 'type': 'AetherSweepEarlyTerminationPolicy'},
}
def __init__(
self,
**kwargs
):
"""
:keyword limits:
:paramtype limits: ~flow.models.AetherSweepSettingsLimits
:keyword search_space:
:paramtype search_space: list[dict[str, str]]
:keyword sampling_algorithm: Possible values include: "Random", "Grid", "Bayesian".
:paramtype sampling_algorithm: str or ~flow.models.AetherSamplingAlgorithmType
:keyword early_termination:
:paramtype early_termination: ~flow.models.AetherSweepEarlyTerminationPolicy
"""
super(AetherSweepSettings, self).__init__(**kwargs)
self.limits = kwargs.get('limits', None)
self.search_space = kwargs.get('search_space', None)
self.sampling_algorithm = kwargs.get('sampling_algorithm', None)
self.early_termination = kwargs.get('early_termination', None)

class AetherSweepSettingsLimits(msrest.serialization.Model):
"""AetherSweepSettingsLimits.
:ivar max_total_trials:
:vartype max_total_trials: int
:ivar max_concurrent_trials:
:vartype max_concurrent_trials: int
"""
_attribute_map = {
'max_total_trials': {'key': 'maxTotalTrials', 'type': 'int'},
'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword max_total_trials:
:paramtype max_total_trials: int
:keyword max_concurrent_trials:
:paramtype max_concurrent_trials: int
"""
super(AetherSweepSettingsLimits, self).__init__(**kwargs)
self.max_total_trials = kwargs.get('max_total_trials', None)
self.max_concurrent_trials = kwargs.get('max_concurrent_trials', None)
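
# Editor-added usage sketch (hypothetical values): a random-sampling sweep capped
# at 20 total / 4 concurrent trials, reusing the bandit policy sketched above.
# search_space entries are plain string-to-string dictionaries; the expression
# syntax in the values is service-defined and assumed here.
#
#     sweep = AetherSweepSettings(
#         limits=AetherSweepSettingsLimits(max_total_trials=20, max_concurrent_trials=4),
#         search_space=[{"learning_rate": "uniform(0.001, 0.1)"}],
#         sampling_algorithm="Random",
#         early_termination=bandit,
#     )
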
class AetherTargetLags(msrest.serialization.Model):
"""AetherTargetLags.
:ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.AetherTargetLagsMode
:ivar values:
:vartype values: list[int]
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'values': {'key': 'values', 'type': '[int]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom".
:paramtype mode: str or ~flow.models.AetherTargetLagsMode
:keyword values:
:paramtype values: list[int]
"""
super(AetherTargetLags, self).__init__(**kwargs)
self.mode = kwargs.get('mode', None)
self.values = kwargs.get('values', None)

class AetherTargetRollingWindowSize(msrest.serialization.Model):
"""AetherTargetRollingWindowSize.
:ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.AetherTargetRollingWindowSizeMode
:ivar value:
:vartype value: int
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'value': {'key': 'value', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom".
:paramtype mode: str or ~flow.models.AetherTargetRollingWindowSizeMode
:keyword value:
:paramtype value: int
"""
super(AetherTargetRollingWindowSize, self).__init__(**kwargs)
self.mode = kwargs.get('mode', None)
self.value = kwargs.get('value', None)
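
# Editor-added usage sketch (hypothetical values): custom forecasting lags and an
# automatic rolling window, following the shared Auto/Custom mode pattern of
# AetherSeasonality, AetherTargetLags, and AetherTargetRollingWindowSize.
#
#     lags = AetherTargetLags(mode="Custom", values=[1, 7, 28])
#     window = AetherTargetRollingWindowSize(mode="Auto")
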
class AetherTargetSelectorConfiguration(msrest.serialization.Model):
"""AetherTargetSelectorConfiguration.
:ivar low_priority_vm_tolerant:
:vartype low_priority_vm_tolerant: bool
:ivar cluster_block_list:
:vartype cluster_block_list: list[str]
:ivar compute_type:
:vartype compute_type: str
:ivar instance_type:
:vartype instance_type: list[str]
:ivar instance_types:
:vartype instance_types: list[str]
:ivar my_resource_only:
:vartype my_resource_only: bool
:ivar plan_id:
:vartype plan_id: str
:ivar plan_region_id:
:vartype plan_region_id: str
:ivar region:
:vartype region: list[str]
:ivar regions:
:vartype regions: list[str]
:ivar vc_block_list:
:vartype vc_block_list: list[str]
"""
_attribute_map = {
'low_priority_vm_tolerant': {'key': 'lowPriorityVMTolerant', 'type': 'bool'},
'cluster_block_list': {'key': 'clusterBlockList', 'type': '[str]'},
'compute_type': {'key': 'computeType', 'type': 'str'},
'instance_type': {'key': 'instanceType', 'type': '[str]'},
'instance_types': {'key': 'instanceTypes', 'type': '[str]'},
'my_resource_only': {'key': 'myResourceOnly', 'type': 'bool'},
'plan_id': {'key': 'planId', 'type': 'str'},
'plan_region_id': {'key': 'planRegionId', 'type': 'str'},
'region': {'key': 'region', 'type': '[str]'},
'regions': {'key': 'regions', 'type': '[str]'},
'vc_block_list': {'key': 'vcBlockList', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword low_priority_vm_tolerant:
:paramtype low_priority_vm_tolerant: bool
:keyword cluster_block_list:
:paramtype cluster_block_list: list[str]
:keyword compute_type:
:paramtype compute_type: str
:keyword instance_type:
:paramtype instance_type: list[str]
:keyword instance_types:
:paramtype instance_types: list[str]
:keyword my_resource_only:
:paramtype my_resource_only: bool
:keyword plan_id:
:paramtype plan_id: str
:keyword plan_region_id:
:paramtype plan_region_id: str
:keyword region:
:paramtype region: list[str]
:keyword regions:
:paramtype regions: list[str]
:keyword vc_block_list:
:paramtype vc_block_list: list[str]
"""
super(AetherTargetSelectorConfiguration, self).__init__(**kwargs)
self.low_priority_vm_tolerant = kwargs.get('low_priority_vm_tolerant', None)
self.cluster_block_list = kwargs.get('cluster_block_list', None)
self.compute_type = kwargs.get('compute_type', None)
self.instance_type = kwargs.get('instance_type', None)
self.instance_types = kwargs.get('instance_types', None)
self.my_resource_only = kwargs.get('my_resource_only', None)
self.plan_id = kwargs.get('plan_id', None)
self.plan_region_id = kwargs.get('plan_region_id', None)
self.region = kwargs.get('region', None)
self.regions = kwargs.get('regions', None)
self.vc_block_list = kwargs.get('vc_block_list', None)

class AetherTestDataSettings(msrest.serialization.Model):
"""AetherTestDataSettings.
:ivar test_data_size:
:vartype test_data_size: float
"""
_attribute_map = {
'test_data_size': {'key': 'testDataSize', 'type': 'float'},
}
def __init__(
self,
**kwargs
):
"""
:keyword test_data_size:
:paramtype test_data_size: float
"""
super(AetherTestDataSettings, self).__init__(**kwargs)
self.test_data_size = kwargs.get('test_data_size', None)

class AetherTorchDistributedConfiguration(msrest.serialization.Model):
"""AetherTorchDistributedConfiguration.
:ivar process_count_per_node:
:vartype process_count_per_node: int
"""
_attribute_map = {
'process_count_per_node': {'key': 'processCountPerNode', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword process_count_per_node:
:paramtype process_count_per_node: int
"""
super(AetherTorchDistributedConfiguration, self).__init__(**kwargs)
self.process_count_per_node = kwargs.get('process_count_per_node', None)

class AetherTrainingOutput(msrest.serialization.Model):
"""AetherTrainingOutput.
:ivar training_output_type: Possible values include: "Metrics", "Model".
:vartype training_output_type: str or ~flow.models.AetherTrainingOutputType
:ivar iteration:
:vartype iteration: int
:ivar metric:
:vartype metric: str
:ivar model_file:
:vartype model_file: str
"""
_attribute_map = {
'training_output_type': {'key': 'trainingOutputType', 'type': 'str'},
'iteration': {'key': 'iteration', 'type': 'int'},
'metric': {'key': 'metric', 'type': 'str'},
'model_file': {'key': 'modelFile', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword training_output_type: Possible values include: "Metrics", "Model".
:paramtype training_output_type: str or ~flow.models.AetherTrainingOutputType
:keyword iteration:
:paramtype iteration: int
:keyword metric:
:paramtype metric: str
:keyword model_file:
:paramtype model_file: str
"""
super(AetherTrainingOutput, self).__init__(**kwargs)
self.training_output_type = kwargs.get('training_output_type', None)
self.iteration = kwargs.get('iteration', None)
self.metric = kwargs.get('metric', None)
self.model_file = kwargs.get('model_file', None)

class AetherTrainingSettings(msrest.serialization.Model):
"""AetherTrainingSettings.
:ivar block_list_models:
:vartype block_list_models: list[str]
:ivar allow_list_models:
:vartype allow_list_models: list[str]
:ivar enable_dnn_training:
:vartype enable_dnn_training: bool
:ivar enable_onnx_compatible_models:
:vartype enable_onnx_compatible_models: bool
:ivar stack_ensemble_settings:
:vartype stack_ensemble_settings: ~flow.models.AetherStackEnsembleSettings
:ivar enable_stack_ensemble:
:vartype enable_stack_ensemble: bool
:ivar enable_vote_ensemble:
:vartype enable_vote_ensemble: bool
:ivar ensemble_model_download_timeout:
:vartype ensemble_model_download_timeout: str
:ivar enable_model_explainability:
:vartype enable_model_explainability: bool
:ivar training_mode: Possible values include: "Distributed", "NonDistributed", "Auto".
:vartype training_mode: str or ~flow.models.AetherTabularTrainingMode
"""
_attribute_map = {
'block_list_models': {'key': 'blockListModels', 'type': '[str]'},
'allow_list_models': {'key': 'allowListModels', 'type': '[str]'},
'enable_dnn_training': {'key': 'enableDnnTraining', 'type': 'bool'},
'enable_onnx_compatible_models': {'key': 'enableOnnxCompatibleModels', 'type': 'bool'},
'stack_ensemble_settings': {'key': 'stackEnsembleSettings', 'type': 'AetherStackEnsembleSettings'},
'enable_stack_ensemble': {'key': 'enableStackEnsemble', 'type': 'bool'},
'enable_vote_ensemble': {'key': 'enableVoteEnsemble', 'type': 'bool'},
'ensemble_model_download_timeout': {'key': 'ensembleModelDownloadTimeout', 'type': 'str'},
'enable_model_explainability': {'key': 'enableModelExplainability', 'type': 'bool'},
'training_mode': {'key': 'trainingMode', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword block_list_models:
:paramtype block_list_models: list[str]
:keyword allow_list_models:
:paramtype allow_list_models: list[str]
:keyword enable_dnn_training:
:paramtype enable_dnn_training: bool
:keyword enable_onnx_compatible_models:
:paramtype enable_onnx_compatible_models: bool
:keyword stack_ensemble_settings:
:paramtype stack_ensemble_settings: ~flow.models.AetherStackEnsembleSettings
:keyword enable_stack_ensemble:
:paramtype enable_stack_ensemble: bool
:keyword enable_vote_ensemble:
:paramtype enable_vote_ensemble: bool
:keyword ensemble_model_download_timeout:
:paramtype ensemble_model_download_timeout: str
:keyword enable_model_explainability:
:paramtype enable_model_explainability: bool
:keyword training_mode: Possible values include: "Distributed", "NonDistributed", "Auto".
:paramtype training_mode: str or ~flow.models.AetherTabularTrainingMode
"""
super(AetherTrainingSettings, self).__init__(**kwargs)
self.block_list_models = kwargs.get('block_list_models', None)
self.allow_list_models = kwargs.get('allow_list_models', None)
self.enable_dnn_training = kwargs.get('enable_dnn_training', None)
self.enable_onnx_compatible_models = kwargs.get('enable_onnx_compatible_models', None)
self.stack_ensemble_settings = kwargs.get('stack_ensemble_settings', None)
self.enable_stack_ensemble = kwargs.get('enable_stack_ensemble', None)
self.enable_vote_ensemble = kwargs.get('enable_vote_ensemble', None)
self.ensemble_model_download_timeout = kwargs.get('ensemble_model_download_timeout', None)
self.enable_model_explainability = kwargs.get('enable_model_explainability', None)
self.training_mode = kwargs.get('training_mode', None)
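
# Editor-added usage sketch (hypothetical values): ONNX-compatible training with
# voting enabled and stacking disabled. ensemble_model_download_timeout is typed
# as a string; the "hh:mm:ss" duration format shown is an assumption.
#
#     training = AetherTrainingSettings(
#         block_list_models=["XGBoostClassifier"],
#         enable_onnx_compatible_models=True,
#         enable_stack_ensemble=False,
#         enable_vote_ensemble=True,
#         ensemble_model_download_timeout="00:05:00",
#     )
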
class AetherUIAzureOpenAIDeploymentNameSelector(msrest.serialization.Model):
"""AetherUIAzureOpenAIDeploymentNameSelector.
:ivar capabilities:
:vartype capabilities: ~flow.models.AetherUIAzureOpenAIModelCapabilities
"""
_attribute_map = {
'capabilities': {'key': 'Capabilities', 'type': 'AetherUIAzureOpenAIModelCapabilities'},
}
def __init__(
self,
**kwargs
):
"""
:keyword capabilities:
:paramtype capabilities: ~flow.models.AetherUIAzureOpenAIModelCapabilities
"""
super(AetherUIAzureOpenAIDeploymentNameSelector, self).__init__(**kwargs)
self.capabilities = kwargs.get('capabilities', None)

class AetherUIAzureOpenAIModelCapabilities(msrest.serialization.Model):
"""AetherUIAzureOpenAIModelCapabilities.
:ivar completion:
:vartype completion: bool
:ivar chat_completion:
:vartype chat_completion: bool
:ivar embeddings:
:vartype embeddings: bool
"""
_attribute_map = {
'completion': {'key': 'Completion', 'type': 'bool'},
'chat_completion': {'key': 'ChatCompletion', 'type': 'bool'},
'embeddings': {'key': 'Embeddings', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword completion:
:paramtype completion: bool
:keyword chat_completion:
:paramtype chat_completion: bool
:keyword embeddings:
:paramtype embeddings: bool
"""
super(AetherUIAzureOpenAIModelCapabilities, self).__init__(**kwargs)
self.completion = kwargs.get('completion', None)
self.chat_completion = kwargs.get('chat_completion', None)
self.embeddings = kwargs.get('embeddings', None)

class AetherUIColumnPicker(msrest.serialization.Model):
"""AetherUIColumnPicker.
:ivar column_picker_for:
:vartype column_picker_for: str
:ivar column_selection_categories:
:vartype column_selection_categories: list[str]
:ivar single_column_selection:
:vartype single_column_selection: bool
"""
_attribute_map = {
'column_picker_for': {'key': 'columnPickerFor', 'type': 'str'},
'column_selection_categories': {'key': 'columnSelectionCategories', 'type': '[str]'},
'single_column_selection': {'key': 'singleColumnSelection', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword column_picker_for:
:paramtype column_picker_for: str
:keyword column_selection_categories:
:paramtype column_selection_categories: list[str]
:keyword single_column_selection:
:paramtype single_column_selection: bool
"""
super(AetherUIColumnPicker, self).__init__(**kwargs)
self.column_picker_for = kwargs.get('column_picker_for', None)
self.column_selection_categories = kwargs.get('column_selection_categories', None)
self.single_column_selection = kwargs.get('single_column_selection', None)

class AetherUIJsonEditor(msrest.serialization.Model):
"""AetherUIJsonEditor.
:ivar json_schema:
:vartype json_schema: str
"""
_attribute_map = {
'json_schema': {'key': 'jsonSchema', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword json_schema:
:paramtype json_schema: str
"""
super(AetherUIJsonEditor, self).__init__(**kwargs)
self.json_schema = kwargs.get('json_schema', None)

class AetherUIParameterHint(msrest.serialization.Model):
"""AetherUIParameterHint.
:ivar ui_widget_type: Possible values include: "Default", "Mode", "ColumnPicker", "Credential",
"Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter", "SectionToggle",
"YamlEditor", "EnableRuntimeSweep", "DataStoreSelection", "InstanceTypeSelection",
"ConnectionSelection", "PromptFlowConnectionSelection", "AzureOpenAIDeploymentNameSelection".
:vartype ui_widget_type: str or ~flow.models.AetherUIWidgetTypeEnum
:ivar column_picker:
:vartype column_picker: ~flow.models.AetherUIColumnPicker
:ivar ui_script_language: Possible values include: "None", "Python", "R", "Json", "Sql".
:vartype ui_script_language: str or ~flow.models.AetherUIScriptLanguageEnum
:ivar json_editor:
:vartype json_editor: ~flow.models.AetherUIJsonEditor
:ivar prompt_flow_connection_selector:
:vartype prompt_flow_connection_selector: ~flow.models.AetherUIPromptFlowConnectionSelector
:ivar azure_open_ai_deployment_name_selector:
:vartype azure_open_ai_deployment_name_selector:
~flow.models.AetherUIAzureOpenAIDeploymentNameSelector
:ivar ux_ignore:
:vartype ux_ignore: bool
:ivar anonymous:
:vartype anonymous: bool
"""
_attribute_map = {
'ui_widget_type': {'key': 'uiWidgetType', 'type': 'str'},
'column_picker': {'key': 'columnPicker', 'type': 'AetherUIColumnPicker'},
'ui_script_language': {'key': 'uiScriptLanguage', 'type': 'str'},
'json_editor': {'key': 'jsonEditor', 'type': 'AetherUIJsonEditor'},
'prompt_flow_connection_selector': {'key': 'PromptFlowConnectionSelector', 'type': 'AetherUIPromptFlowConnectionSelector'},
'azure_open_ai_deployment_name_selector': {'key': 'AzureOpenAIDeploymentNameSelector', 'type': 'AetherUIAzureOpenAIDeploymentNameSelector'},
'ux_ignore': {'key': 'UxIgnore', 'type': 'bool'},
'anonymous': {'key': 'Anonymous', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword ui_widget_type: Possible values include: "Default", "Mode", "ColumnPicker",
"Credential", "Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter",
"SectionToggle", "YamlEditor", "EnableRuntimeSweep", "DataStoreSelection",
"InstanceTypeSelection", "ConnectionSelection", "PromptFlowConnectionSelection",
"AzureOpenAIDeploymentNameSelection".
:paramtype ui_widget_type: str or ~flow.models.AetherUIWidgetTypeEnum
:keyword column_picker:
:paramtype column_picker: ~flow.models.AetherUIColumnPicker
:keyword ui_script_language: Possible values include: "None", "Python", "R", "Json", "Sql".
:paramtype ui_script_language: str or ~flow.models.AetherUIScriptLanguageEnum
:keyword json_editor:
:paramtype json_editor: ~flow.models.AetherUIJsonEditor
:keyword prompt_flow_connection_selector:
:paramtype prompt_flow_connection_selector: ~flow.models.AetherUIPromptFlowConnectionSelector
:keyword azure_open_ai_deployment_name_selector:
:paramtype azure_open_ai_deployment_name_selector:
~flow.models.AetherUIAzureOpenAIDeploymentNameSelector
:keyword ux_ignore:
:paramtype ux_ignore: bool
:keyword anonymous:
:paramtype anonymous: bool
"""
super(AetherUIParameterHint, self).__init__(**kwargs)
self.ui_widget_type = kwargs.get('ui_widget_type', None)
self.column_picker = kwargs.get('column_picker', None)
self.ui_script_language = kwargs.get('ui_script_language', None)
self.json_editor = kwargs.get('json_editor', None)
self.prompt_flow_connection_selector = kwargs.get('prompt_flow_connection_selector', None)
self.azure_open_ai_deployment_name_selector = kwargs.get('azure_open_ai_deployment_name_selector', None)
self.ux_ignore = kwargs.get('ux_ignore', None)
self.anonymous = kwargs.get('anonymous', None)
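
# Editor-added usage sketch (hypothetical values): hinting that a parameter should
# render as a single-select column picker. The widget-type string comes from the
# enum values documented in the class docstring above.
#
#     hint = AetherUIParameterHint(
#         ui_widget_type="ColumnPicker",
#         column_picker=AetherUIColumnPicker(
#             column_picker_for="dataset", single_column_selection=True),
#     )
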
class AetherUIPromptFlowConnectionSelector(msrest.serialization.Model):
"""AetherUIPromptFlowConnectionSelector.
:ivar prompt_flow_connection_type:
:vartype prompt_flow_connection_type: str
"""
_attribute_map = {
'prompt_flow_connection_type': {'key': 'PromptFlowConnectionType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword prompt_flow_connection_type:
:paramtype prompt_flow_connection_type: str
"""
super(AetherUIPromptFlowConnectionSelector, self).__init__(**kwargs)
self.prompt_flow_connection_type = kwargs.get('prompt_flow_connection_type', None)
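
# Editor-added usage sketch (illustrative only; this module is AutoRest-generated
# and examples are not part of the generated surface). Shows how a UI parameter
# hint pointing at a prompt flow connection selector might be assembled; the
# connection type value is an assumption, not required by the service.
def _example_ui_parameter_hint():
    selector = AetherUIPromptFlowConnectionSelector(
        prompt_flow_connection_type="AzureOpenAI",  # assumed connection type name
    )
    hint = AetherUIParameterHint(
        ui_widget_type="PromptFlowConnectionSelection",  # documented AetherUIWidgetTypeEnum value
        prompt_flow_connection_selector=selector,
    )
    # msrest serialization emits the wire-format keys from _attribute_map,
    # e.g. 'uiWidgetType' and 'PromptFlowConnectionSelector'.
    return hint.serialize()
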
class AetherValidationDataSettings(msrest.serialization.Model):
"""AetherValidationDataSettings.
:ivar n_cross_validations:
:vartype n_cross_validations: ~flow.models.AetherNCrossValidations
:ivar validation_data_size:
:vartype validation_data_size: float
:ivar cv_split_column_names:
:vartype cv_split_column_names: list[str]
:ivar validation_type:
:vartype validation_type: str
"""
_attribute_map = {
'n_cross_validations': {'key': 'nCrossValidations', 'type': 'AetherNCrossValidations'},
'validation_data_size': {'key': 'validationDataSize', 'type': 'float'},
'cv_split_column_names': {'key': 'cvSplitColumnNames', 'type': '[str]'},
'validation_type': {'key': 'validationType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword n_cross_validations:
:paramtype n_cross_validations: ~flow.models.AetherNCrossValidations
:keyword validation_data_size:
:paramtype validation_data_size: float
:keyword cv_split_column_names:
:paramtype cv_split_column_names: list[str]
:keyword validation_type:
:paramtype validation_type: str
"""
super(AetherValidationDataSettings, self).__init__(**kwargs)
self.n_cross_validations = kwargs.get('n_cross_validations', None)
self.validation_data_size = kwargs.get('validation_data_size', None)
self.cv_split_column_names = kwargs.get('cv_split_column_names', None)
self.validation_type = kwargs.get('validation_type', None)
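
# Editor-added sketch: configuring validation data settings. The 20% split,
# the split column name, and the validation_type label are assumptions for
# illustration; consult the service documentation for accepted strings.
def _example_validation_data_settings():
    return AetherValidationDataSettings(
        validation_data_size=0.2,           # hold out 20% of the data (assumed)
        cv_split_column_names=["fold"],     # hypothetical CV split column
        validation_type="TrainValidation",  # assumed label, not verified
    )
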
class AetherVsoBuildArtifactInfo(msrest.serialization.Model):
"""AetherVsoBuildArtifactInfo.
:ivar build_info:
:vartype build_info: ~flow.models.AetherVsoBuildInfo
:ivar download_url:
:vartype download_url: str
"""
_attribute_map = {
'build_info': {'key': 'buildInfo', 'type': 'AetherVsoBuildInfo'},
'download_url': {'key': 'downloadUrl', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword build_info:
:paramtype build_info: ~flow.models.AetherVsoBuildInfo
:keyword download_url:
:paramtype download_url: str
"""
super(AetherVsoBuildArtifactInfo, self).__init__(**kwargs)
self.build_info = kwargs.get('build_info', None)
self.download_url = kwargs.get('download_url', None)
class AetherVsoBuildDefinitionInfo(msrest.serialization.Model):
"""AetherVsoBuildDefinitionInfo.
:ivar account_name:
:vartype account_name: str
:ivar project_id:
:vartype project_id: str
:ivar build_definition_id:
:vartype build_definition_id: int
"""
_attribute_map = {
'account_name': {'key': 'accountName', 'type': 'str'},
'project_id': {'key': 'projectId', 'type': 'str'},
'build_definition_id': {'key': 'buildDefinitionId', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword account_name:
:paramtype account_name: str
:keyword project_id:
:paramtype project_id: str
:keyword build_definition_id:
:paramtype build_definition_id: int
"""
super(AetherVsoBuildDefinitionInfo, self).__init__(**kwargs)
self.account_name = kwargs.get('account_name', None)
self.project_id = kwargs.get('project_id', None)
self.build_definition_id = kwargs.get('build_definition_id', None)
class AetherVsoBuildInfo(msrest.serialization.Model):
"""AetherVsoBuildInfo.
:ivar definition_info:
:vartype definition_info: ~flow.models.AetherVsoBuildDefinitionInfo
:ivar build_id:
:vartype build_id: int
"""
_attribute_map = {
'definition_info': {'key': 'definitionInfo', 'type': 'AetherVsoBuildDefinitionInfo'},
'build_id': {'key': 'buildId', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword definition_info:
:paramtype definition_info: ~flow.models.AetherVsoBuildDefinitionInfo
:keyword build_id:
:paramtype build_id: int
"""
super(AetherVsoBuildInfo, self).__init__(**kwargs)
self.definition_info = kwargs.get('definition_info', None)
self.build_id = kwargs.get('build_id', None)
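
# Editor-added sketch showing how the VSO build models nest: a build definition
# identifies the pipeline, a build info pins a concrete build, and an artifact
# info couples that build with a download URL. All literal values are made up.
def _example_vso_build_artifact():
    definition = AetherVsoBuildDefinitionInfo(
        account_name="contoso",  # hypothetical Azure DevOps account
        project_id="00000000-0000-0000-0000-000000000000",
        build_definition_id=42,
    )
    build = AetherVsoBuildInfo(definition_info=definition, build_id=1234)
    return AetherVsoBuildArtifactInfo(
        build_info=build,
        download_url="https://example.invalid/artifact.zip",  # placeholder URL
    )
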
class AEVAComputeConfiguration(msrest.serialization.Model):
"""AEVAComputeConfiguration.
:ivar target:
:vartype target: str
:ivar instance_count:
:vartype instance_count: int
:ivar is_local:
:vartype is_local: bool
:ivar location:
:vartype location: str
:ivar is_clusterless:
:vartype is_clusterless: bool
:ivar instance_type:
:vartype instance_type: str
:ivar properties: Dictionary of :code:`<any>`.
:vartype properties: dict[str, any]
:ivar is_preemptable:
:vartype is_preemptable: bool
"""
_attribute_map = {
'target': {'key': 'target', 'type': 'str'},
'instance_count': {'key': 'instanceCount', 'type': 'int'},
'is_local': {'key': 'isLocal', 'type': 'bool'},
'location': {'key': 'location', 'type': 'str'},
'is_clusterless': {'key': 'isClusterless', 'type': 'bool'},
'instance_type': {'key': 'instanceType', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{object}'},
'is_preemptable': {'key': 'isPreemptable', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword target:
:paramtype target: str
:keyword instance_count:
:paramtype instance_count: int
:keyword is_local:
:paramtype is_local: bool
:keyword location:
:paramtype location: str
:keyword is_clusterless:
:paramtype is_clusterless: bool
:keyword instance_type:
:paramtype instance_type: str
:keyword properties: Dictionary of :code:`<any>`.
:paramtype properties: dict[str, any]
:keyword is_preemptable:
:paramtype is_preemptable: bool
"""
super(AEVAComputeConfiguration, self).__init__(**kwargs)
self.target = kwargs.get('target', None)
self.instance_count = kwargs.get('instance_count', None)
self.is_local = kwargs.get('is_local', None)
self.location = kwargs.get('location', None)
self.is_clusterless = kwargs.get('is_clusterless', None)
self.instance_type = kwargs.get('instance_type', None)
self.properties = kwargs.get('properties', None)
self.is_preemptable = kwargs.get('is_preemptable', None)
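
# Editor-added sketch: a compute configuration targeting a named cluster. The
# target name and instance type are placeholders; 'properties' accepts
# arbitrary key/value pairs (serialized as '{object}').
def _example_aeva_compute_configuration():
    return AEVAComputeConfiguration(
        target="cpu-cluster",             # assumed compute target name
        instance_count=2,
        instance_type="Standard_DS3_v2",  # assumed VM SKU
        is_local=False,
        properties={"customTag": "demo"},
    )
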
class AEVAResourceConfiguration(msrest.serialization.Model):
"""AEVAResourceConfiguration.
:ivar instance_count:
:vartype instance_count: int
:ivar instance_type:
:vartype instance_type: str
:ivar properties: Dictionary of :code:`<any>`.
:vartype properties: dict[str, any]
:ivar locations:
:vartype locations: list[str]
:ivar instance_priority:
:vartype instance_priority: str
:ivar quota_enforcement_resource_id:
:vartype quota_enforcement_resource_id: str
"""
_attribute_map = {
'instance_count': {'key': 'instanceCount', 'type': 'int'},
'instance_type': {'key': 'instanceType', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{object}'},
'locations': {'key': 'locations', 'type': '[str]'},
'instance_priority': {'key': 'instancePriority', 'type': 'str'},
'quota_enforcement_resource_id': {'key': 'quotaEnforcementResourceId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword instance_count:
:paramtype instance_count: int
:keyword instance_type:
:paramtype instance_type: str
:keyword properties: Dictionary of :code:`<any>`.
:paramtype properties: dict[str, any]
:keyword locations:
:paramtype locations: list[str]
:keyword instance_priority:
:paramtype instance_priority: str
:keyword quota_enforcement_resource_id:
:paramtype quota_enforcement_resource_id: str
"""
super(AEVAResourceConfiguration, self).__init__(**kwargs)
self.instance_count = kwargs.get('instance_count', None)
self.instance_type = kwargs.get('instance_type', None)
self.properties = kwargs.get('properties', None)
self.locations = kwargs.get('locations', None)
self.instance_priority = kwargs.get('instance_priority', None)
self.quota_enforcement_resource_id = kwargs.get('quota_enforcement_resource_id', None)
class AISuperComputerConfiguration(msrest.serialization.Model):
"""AISuperComputerConfiguration.
:ivar instance_type:
:vartype instance_type: str
:ivar instance_types:
:vartype instance_types: list[str]
:ivar image_version:
:vartype image_version: str
:ivar location:
:vartype location: str
:ivar locations:
:vartype locations: list[str]
:ivar ai_super_computer_storage_data: Dictionary of
:code:`<AISuperComputerStorageReferenceConfiguration>`.
:vartype ai_super_computer_storage_data: dict[str,
~flow.models.AISuperComputerStorageReferenceConfiguration]
:ivar interactive:
:vartype interactive: bool
:ivar scale_policy:
:vartype scale_policy: ~flow.models.AISuperComputerScalePolicy
:ivar virtual_cluster_arm_id:
:vartype virtual_cluster_arm_id: str
:ivar tensorboard_log_directory:
:vartype tensorboard_log_directory: str
:ivar ssh_public_key:
:vartype ssh_public_key: str
:ivar ssh_public_keys:
:vartype ssh_public_keys: list[str]
:ivar enable_azml_int:
:vartype enable_azml_int: bool
:ivar priority:
:vartype priority: str
:ivar sla_tier:
:vartype sla_tier: str
:ivar suspend_on_idle_time_hours:
:vartype suspend_on_idle_time_hours: long
:ivar user_alias:
:vartype user_alias: str
:ivar quota_enforcement_resource_id:
:vartype quota_enforcement_resource_id: str
:ivar model_compute_specification_id:
:vartype model_compute_specification_id: str
:ivar group_policy_name:
:vartype group_policy_name: str
"""
_attribute_map = {
'instance_type': {'key': 'instanceType', 'type': 'str'},
'instance_types': {'key': 'instanceTypes', 'type': '[str]'},
'image_version': {'key': 'imageVersion', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'locations': {'key': 'locations', 'type': '[str]'},
'ai_super_computer_storage_data': {'key': 'aiSuperComputerStorageData', 'type': '{AISuperComputerStorageReferenceConfiguration}'},
'interactive': {'key': 'interactive', 'type': 'bool'},
'scale_policy': {'key': 'scalePolicy', 'type': 'AISuperComputerScalePolicy'},
'virtual_cluster_arm_id': {'key': 'virtualClusterArmId', 'type': 'str'},
'tensorboard_log_directory': {'key': 'tensorboardLogDirectory', 'type': 'str'},
'ssh_public_key': {'key': 'sshPublicKey', 'type': 'str'},
'ssh_public_keys': {'key': 'sshPublicKeys', 'type': '[str]'},
'enable_azml_int': {'key': 'enableAzmlInt', 'type': 'bool'},
'priority': {'key': 'priority', 'type': 'str'},
'sla_tier': {'key': 'slaTier', 'type': 'str'},
'suspend_on_idle_time_hours': {'key': 'suspendOnIdleTimeHours', 'type': 'long'},
'user_alias': {'key': 'userAlias', 'type': 'str'},
'quota_enforcement_resource_id': {'key': 'quotaEnforcementResourceId', 'type': 'str'},
'model_compute_specification_id': {'key': 'modelComputeSpecificationId', 'type': 'str'},
'group_policy_name': {'key': 'groupPolicyName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword instance_type:
:paramtype instance_type: str
:keyword instance_types:
:paramtype instance_types: list[str]
:keyword image_version:
:paramtype image_version: str
:keyword location:
:paramtype location: str
:keyword locations:
:paramtype locations: list[str]
:keyword ai_super_computer_storage_data: Dictionary of
:code:`<AISuperComputerStorageReferenceConfiguration>`.
:paramtype ai_super_computer_storage_data: dict[str,
~flow.models.AISuperComputerStorageReferenceConfiguration]
:keyword interactive:
:paramtype interactive: bool
:keyword scale_policy:
:paramtype scale_policy: ~flow.models.AISuperComputerScalePolicy
:keyword virtual_cluster_arm_id:
:paramtype virtual_cluster_arm_id: str
:keyword tensorboard_log_directory:
:paramtype tensorboard_log_directory: str
:keyword ssh_public_key:
:paramtype ssh_public_key: str
:keyword ssh_public_keys:
:paramtype ssh_public_keys: list[str]
:keyword enable_azml_int:
:paramtype enable_azml_int: bool
:keyword priority:
:paramtype priority: str
:keyword sla_tier:
:paramtype sla_tier: str
:keyword suspend_on_idle_time_hours:
:paramtype suspend_on_idle_time_hours: long
:keyword user_alias:
:paramtype user_alias: str
:keyword quota_enforcement_resource_id:
:paramtype quota_enforcement_resource_id: str
:keyword model_compute_specification_id:
:paramtype model_compute_specification_id: str
:keyword group_policy_name:
:paramtype group_policy_name: str
"""
super(AISuperComputerConfiguration, self).__init__(**kwargs)
self.instance_type = kwargs.get('instance_type', None)
self.instance_types = kwargs.get('instance_types', None)
self.image_version = kwargs.get('image_version', None)
self.location = kwargs.get('location', None)
self.locations = kwargs.get('locations', None)
self.ai_super_computer_storage_data = kwargs.get('ai_super_computer_storage_data', None)
self.interactive = kwargs.get('interactive', None)
self.scale_policy = kwargs.get('scale_policy', None)
self.virtual_cluster_arm_id = kwargs.get('virtual_cluster_arm_id', None)
self.tensorboard_log_directory = kwargs.get('tensorboard_log_directory', None)
self.ssh_public_key = kwargs.get('ssh_public_key', None)
self.ssh_public_keys = kwargs.get('ssh_public_keys', None)
self.enable_azml_int = kwargs.get('enable_azml_int', None)
self.priority = kwargs.get('priority', None)
self.sla_tier = kwargs.get('sla_tier', None)
self.suspend_on_idle_time_hours = kwargs.get('suspend_on_idle_time_hours', None)
self.user_alias = kwargs.get('user_alias', None)
self.quota_enforcement_resource_id = kwargs.get('quota_enforcement_resource_id', None)
self.model_compute_specification_id = kwargs.get('model_compute_specification_id', None)
self.group_policy_name = kwargs.get('group_policy_name', None)
class AISuperComputerScalePolicy(msrest.serialization.Model):
"""AISuperComputerScalePolicy.
:ivar auto_scale_instance_type_count_set:
:vartype auto_scale_instance_type_count_set: list[int]
:ivar auto_scale_interval_in_sec:
:vartype auto_scale_interval_in_sec: int
:ivar max_instance_type_count:
:vartype max_instance_type_count: int
:ivar min_instance_type_count:
:vartype min_instance_type_count: int
"""
_attribute_map = {
'auto_scale_instance_type_count_set': {'key': 'autoScaleInstanceTypeCountSet', 'type': '[int]'},
'auto_scale_interval_in_sec': {'key': 'autoScaleIntervalInSec', 'type': 'int'},
'max_instance_type_count': {'key': 'maxInstanceTypeCount', 'type': 'int'},
'min_instance_type_count': {'key': 'minInstanceTypeCount', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword auto_scale_instance_type_count_set:
:paramtype auto_scale_instance_type_count_set: list[int]
:keyword auto_scale_interval_in_sec:
:paramtype auto_scale_interval_in_sec: int
:keyword max_instance_type_count:
:paramtype max_instance_type_count: int
:keyword min_instance_type_count:
:paramtype min_instance_type_count: int
"""
super(AISuperComputerScalePolicy, self).__init__(**kwargs)
self.auto_scale_instance_type_count_set = kwargs.get('auto_scale_instance_type_count_set', None)
self.auto_scale_interval_in_sec = kwargs.get('auto_scale_interval_in_sec', None)
self.max_instance_type_count = kwargs.get('max_instance_type_count', None)
self.min_instance_type_count = kwargs.get('min_instance_type_count', None)
class AISuperComputerStorageReferenceConfiguration(msrest.serialization.Model):
"""AISuperComputerStorageReferenceConfiguration.
:ivar container_name:
:vartype container_name: str
:ivar relative_path:
:vartype relative_path: str
"""
_attribute_map = {
'container_name': {'key': 'containerName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword container_name:
:paramtype container_name: str
:keyword relative_path:
:paramtype relative_path: str
"""
super(AISuperComputerStorageReferenceConfiguration, self).__init__(**kwargs)
self.container_name = kwargs.get('container_name', None)
self.relative_path = kwargs.get('relative_path', None)
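
# Editor-added sketch composing the AI supercomputer models defined above:
# a storage reference mapped by a caller-chosen name, plus a scale policy
# embedded in the top-level configuration. Every literal value is an
# assumption for illustration.
def _example_ai_super_computer_configuration():
    storage = AISuperComputerStorageReferenceConfiguration(
        container_name="training-data",  # hypothetical blob container
        relative_path="datasets/v1",
    )
    scale_policy = AISuperComputerScalePolicy(
        auto_scale_instance_type_count_set=[1, 2, 4],
        auto_scale_interval_in_sec=300,
        max_instance_type_count=4,
        min_instance_type_count=1,
    )
    return AISuperComputerConfiguration(
        instance_type="ND96asr_v4",  # assumed SKU name
        location="westus2",
        ai_super_computer_storage_data={"default": storage},
        scale_policy=scale_policy,
        sla_tier="Standard",         # assumed tier label
    )
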
class AKSAdvanceSettings(msrest.serialization.Model):
"""AKSAdvanceSettings.
:ivar auto_scaler:
:vartype auto_scaler: ~flow.models.AutoScaler
:ivar container_resource_requirements:
:vartype container_resource_requirements: ~flow.models.ContainerResourceRequirements
:ivar app_insights_enabled:
:vartype app_insights_enabled: bool
:ivar scoring_timeout_ms:
:vartype scoring_timeout_ms: int
:ivar num_replicas:
:vartype num_replicas: int
"""
_attribute_map = {
'auto_scaler': {'key': 'autoScaler', 'type': 'AutoScaler'},
'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
'scoring_timeout_ms': {'key': 'scoringTimeoutMs', 'type': 'int'},
'num_replicas': {'key': 'numReplicas', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword auto_scaler:
:paramtype auto_scaler: ~flow.models.AutoScaler
:keyword container_resource_requirements:
:paramtype container_resource_requirements: ~flow.models.ContainerResourceRequirements
:keyword app_insights_enabled:
:paramtype app_insights_enabled: bool
:keyword scoring_timeout_ms:
:paramtype scoring_timeout_ms: int
:keyword num_replicas:
:paramtype num_replicas: int
"""
super(AKSAdvanceSettings, self).__init__(**kwargs)
self.auto_scaler = kwargs.get('auto_scaler', None)
self.container_resource_requirements = kwargs.get('container_resource_requirements', None)
self.app_insights_enabled = kwargs.get('app_insights_enabled', None)
self.scoring_timeout_ms = kwargs.get('scoring_timeout_ms', None)
self.num_replicas = kwargs.get('num_replicas', None)
class AKSReplicaStatus(msrest.serialization.Model):
"""AKSReplicaStatus.
:ivar desired_replicas:
:vartype desired_replicas: int
:ivar updated_replicas:
:vartype updated_replicas: int
:ivar available_replicas:
:vartype available_replicas: int
:ivar error:
:vartype error: ~flow.models.ModelManagementErrorResponse
"""
_attribute_map = {
'desired_replicas': {'key': 'desiredReplicas', 'type': 'int'},
'updated_replicas': {'key': 'updatedReplicas', 'type': 'int'},
'available_replicas': {'key': 'availableReplicas', 'type': 'int'},
'error': {'key': 'error', 'type': 'ModelManagementErrorResponse'},
}
def __init__(
self,
**kwargs
):
"""
:keyword desired_replicas:
:paramtype desired_replicas: int
:keyword updated_replicas:
:paramtype updated_replicas: int
:keyword available_replicas:
:paramtype available_replicas: int
:keyword error:
:paramtype error: ~flow.models.ModelManagementErrorResponse
"""
super(AKSReplicaStatus, self).__init__(**kwargs)
self.desired_replicas = kwargs.get('desired_replicas', None)
self.updated_replicas = kwargs.get('updated_replicas', None)
self.available_replicas = kwargs.get('available_replicas', None)
self.error = kwargs.get('error', None)
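
# Editor-added sketch: models can also be rehydrated from wire-format payloads
# via msrest's Model.deserialize, which maps the JSON keys in _attribute_map
# back onto attribute names. The payload below is fabricated.
def _example_aks_replica_status_roundtrip():
    payload = {"desiredReplicas": 3, "updatedReplicas": 3, "availableReplicas": 2}
    status = AKSReplicaStatus.deserialize(payload)
    assert status.available_replicas == 2
    return status
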
class AMLComputeConfiguration(msrest.serialization.Model):
"""AMLComputeConfiguration.
:ivar name:
:vartype name: str
:ivar vm_size:
:vartype vm_size: str
:ivar vm_priority: Possible values include: "Dedicated", "Lowpriority".
:vartype vm_priority: str or ~flow.models.VmPriority
:ivar retain_cluster:
:vartype retain_cluster: bool
:ivar cluster_max_node_count:
:vartype cluster_max_node_count: int
:ivar os_type:
:vartype os_type: str
:ivar virtual_machine_image:
:vartype virtual_machine_image: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'vm_priority': {'key': 'vmPriority', 'type': 'str'},
'retain_cluster': {'key': 'retainCluster', 'type': 'bool'},
'cluster_max_node_count': {'key': 'clusterMaxNodeCount', 'type': 'int'},
'os_type': {'key': 'osType', 'type': 'str'},
'virtual_machine_image': {'key': 'virtualMachineImage', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword vm_size:
:paramtype vm_size: str
:keyword vm_priority: Possible values include: "Dedicated", "Lowpriority".
:paramtype vm_priority: str or ~flow.models.VmPriority
:keyword retain_cluster:
:paramtype retain_cluster: bool
:keyword cluster_max_node_count:
:paramtype cluster_max_node_count: int
:keyword os_type:
:paramtype os_type: str
:keyword virtual_machine_image:
:paramtype virtual_machine_image: str
"""
super(AMLComputeConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.vm_size = kwargs.get('vm_size', None)
self.vm_priority = kwargs.get('vm_priority', None)
self.retain_cluster = kwargs.get('retain_cluster', None)
self.cluster_max_node_count = kwargs.get('cluster_max_node_count', None)
self.os_type = kwargs.get('os_type', None)
self.virtual_machine_image = kwargs.get('virtual_machine_image', None)
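
# Editor-added sketch: an AML compute configuration using one of the documented
# VmPriority values ("Dedicated" or "Lowpriority"). Name and size are
# placeholders.
def _example_aml_compute_configuration():
    return AMLComputeConfiguration(
        name="gpu-cluster",         # hypothetical compute name
        vm_size="Standard_NC6",     # assumed VM size
        vm_priority="Lowpriority",  # enum value from the docstring above
        cluster_max_node_count=4,
        os_type="Linux",
    )
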
class AmlDataset(msrest.serialization.Model):
"""AmlDataset.
:ivar registered_data_set_reference:
:vartype registered_data_set_reference: ~flow.models.RegisteredDataSetReference
:ivar saved_data_set_reference:
:vartype saved_data_set_reference: ~flow.models.SavedDataSetReference
:ivar additional_transformations:
:vartype additional_transformations: str
"""
_attribute_map = {
'registered_data_set_reference': {'key': 'registeredDataSetReference', 'type': 'RegisteredDataSetReference'},
'saved_data_set_reference': {'key': 'savedDataSetReference', 'type': 'SavedDataSetReference'},
'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword registered_data_set_reference:
:paramtype registered_data_set_reference: ~flow.models.RegisteredDataSetReference
:keyword saved_data_set_reference:
:paramtype saved_data_set_reference: ~flow.models.SavedDataSetReference
:keyword additional_transformations:
:paramtype additional_transformations: str
"""
super(AmlDataset, self).__init__(**kwargs)
self.registered_data_set_reference = kwargs.get('registered_data_set_reference', None)
self.saved_data_set_reference = kwargs.get('saved_data_set_reference', None)
self.additional_transformations = kwargs.get('additional_transformations', None)
class AmlK8SConfiguration(msrest.serialization.Model):
"""AmlK8SConfiguration.
:ivar resource_configuration:
:vartype resource_configuration: ~flow.models.ResourceConfiguration
:ivar priority_configuration:
:vartype priority_configuration: ~flow.models.AmlK8SPriorityConfiguration
:ivar interactive_configuration:
:vartype interactive_configuration: ~flow.models.InteractiveConfiguration
"""
_attribute_map = {
'resource_configuration': {'key': 'resourceConfiguration', 'type': 'ResourceConfiguration'},
'priority_configuration': {'key': 'priorityConfiguration', 'type': 'AmlK8SPriorityConfiguration'},
'interactive_configuration': {'key': 'interactiveConfiguration', 'type': 'InteractiveConfiguration'},
}
def __init__(
self,
**kwargs
):
"""
:keyword resource_configuration:
:paramtype resource_configuration: ~flow.models.ResourceConfiguration
:keyword priority_configuration:
:paramtype priority_configuration: ~flow.models.AmlK8SPriorityConfiguration
:keyword interactive_configuration:
:paramtype interactive_configuration: ~flow.models.InteractiveConfiguration
"""
super(AmlK8SConfiguration, self).__init__(**kwargs)
self.resource_configuration = kwargs.get('resource_configuration', None)
self.priority_configuration = kwargs.get('priority_configuration', None)
self.interactive_configuration = kwargs.get('interactive_configuration', None)
class AmlK8SPriorityConfiguration(msrest.serialization.Model):
"""AmlK8SPriorityConfiguration.
:ivar job_priority:
:vartype job_priority: int
:ivar is_preemptible:
:vartype is_preemptible: bool
:ivar node_count_set:
:vartype node_count_set: list[int]
:ivar scale_interval:
:vartype scale_interval: int
"""
_attribute_map = {
'job_priority': {'key': 'jobPriority', 'type': 'int'},
'is_preemptible': {'key': 'isPreemptible', 'type': 'bool'},
'node_count_set': {'key': 'nodeCountSet', 'type': '[int]'},
'scale_interval': {'key': 'scaleInterval', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword job_priority:
:paramtype job_priority: int
:keyword is_preemptible:
:paramtype is_preemptible: bool
:keyword node_count_set:
:paramtype node_count_set: list[int]
:keyword scale_interval:
:paramtype scale_interval: int
"""
super(AmlK8SPriorityConfiguration, self).__init__(**kwargs)
self.job_priority = kwargs.get('job_priority', None)
self.is_preemptible = kwargs.get('is_preemptible', None)
self.node_count_set = kwargs.get('node_count_set', None)
self.scale_interval = kwargs.get('scale_interval', None)
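
# Editor-added sketch: a preemptible AmlK8s priority configuration. The node
# count set lists the cluster sizes scaling may choose between; all values are
# assumptions (the unit of scale_interval is not documented here).
def _example_amlk8s_priority_configuration():
    return AmlK8SPriorityConfiguration(
        job_priority=100,  # assumed priority value
        is_preemptible=True,
        node_count_set=[1, 2, 4],
        scale_interval=60,
    )
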
class AmlSparkCloudSetting(msrest.serialization.Model):
"""AmlSparkCloudSetting.
:ivar entry:
:vartype entry: ~flow.models.EntrySetting
:ivar files:
:vartype files: list[str]
:ivar archives:
:vartype archives: list[str]
:ivar jars:
:vartype jars: list[str]
:ivar py_files:
:vartype py_files: list[str]
:ivar driver_memory:
:vartype driver_memory: str
:ivar driver_cores:
:vartype driver_cores: int
:ivar executor_memory:
:vartype executor_memory: str
:ivar executor_cores:
:vartype executor_cores: int
:ivar number_executors:
:vartype number_executors: int
:ivar environment_asset_id:
:vartype environment_asset_id: str
:ivar environment_variables: Dictionary of :code:`<string>`.
:vartype environment_variables: dict[str, str]
:ivar inline_environment_definition_string:
:vartype inline_environment_definition_string: str
:ivar conf: Dictionary of :code:`<string>`.
:vartype conf: dict[str, str]
:ivar compute:
:vartype compute: str
:ivar resources:
:vartype resources: ~flow.models.ResourcesSetting
:ivar identity:
:vartype identity: ~flow.models.IdentitySetting
"""
_attribute_map = {
'entry': {'key': 'entry', 'type': 'EntrySetting'},
'files': {'key': 'files', 'type': '[str]'},
'archives': {'key': 'archives', 'type': '[str]'},
'jars': {'key': 'jars', 'type': '[str]'},
'py_files': {'key': 'pyFiles', 'type': '[str]'},
'driver_memory': {'key': 'driverMemory', 'type': 'str'},
'driver_cores': {'key': 'driverCores', 'type': 'int'},
'executor_memory': {'key': 'executorMemory', 'type': 'str'},
'executor_cores': {'key': 'executorCores', 'type': 'int'},
'number_executors': {'key': 'numberExecutors', 'type': 'int'},
'environment_asset_id': {'key': 'environmentAssetId', 'type': 'str'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'inline_environment_definition_string': {'key': 'inlineEnvironmentDefinitionString', 'type': 'str'},
'conf': {'key': 'conf', 'type': '{str}'},
'compute': {'key': 'compute', 'type': 'str'},
'resources': {'key': 'resources', 'type': 'ResourcesSetting'},
'identity': {'key': 'identity', 'type': 'IdentitySetting'},
}
def __init__(
self,
**kwargs
):
"""
:keyword entry:
:paramtype entry: ~flow.models.EntrySetting
:keyword files:
:paramtype files: list[str]
:keyword archives:
:paramtype archives: list[str]
:keyword jars:
:paramtype jars: list[str]
:keyword py_files:
:paramtype py_files: list[str]
:keyword driver_memory:
:paramtype driver_memory: str
:keyword driver_cores:
:paramtype driver_cores: int
:keyword executor_memory:
:paramtype executor_memory: str
:keyword executor_cores:
:paramtype executor_cores: int
:keyword number_executors:
:paramtype number_executors: int
:keyword environment_asset_id:
:paramtype environment_asset_id: str
:keyword environment_variables: Dictionary of :code:`<string>`.
:paramtype environment_variables: dict[str, str]
:keyword inline_environment_definition_string:
:paramtype inline_environment_definition_string: str
:keyword conf: Dictionary of :code:`<string>`.
:paramtype conf: dict[str, str]
:keyword compute:
:paramtype compute: str
:keyword resources:
:paramtype resources: ~flow.models.ResourcesSetting
:keyword identity:
:paramtype identity: ~flow.models.IdentitySetting
"""
super(AmlSparkCloudSetting, self).__init__(**kwargs)
self.entry = kwargs.get('entry', None)
self.files = kwargs.get('files', None)
self.archives = kwargs.get('archives', None)
self.jars = kwargs.get('jars', None)
self.py_files = kwargs.get('py_files', None)
self.driver_memory = kwargs.get('driver_memory', None)
self.driver_cores = kwargs.get('driver_cores', None)
self.executor_memory = kwargs.get('executor_memory', None)
self.executor_cores = kwargs.get('executor_cores', None)
self.number_executors = kwargs.get('number_executors', None)
self.environment_asset_id = kwargs.get('environment_asset_id', None)
self.environment_variables = kwargs.get('environment_variables', None)
self.inline_environment_definition_string = kwargs.get('inline_environment_definition_string', None)
self.conf = kwargs.get('conf', None)
self.compute = kwargs.get('compute', None)
self.resources = kwargs.get('resources', None)
self.identity = kwargs.get('identity', None)
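
# Editor-added sketch: a Spark cloud setting with driver/executor sizing.
# Memory strings such as "28g" follow the usual Spark convention; the file
# list, compute name, and Spark conf entries are assumptions.
def _example_aml_spark_cloud_setting():
    return AmlSparkCloudSetting(
        py_files=["helpers.zip"],  # hypothetical dependency archive
        driver_memory="28g",
        driver_cores=4,
        executor_memory="28g",
        executor_cores=4,
        number_executors=2,
        conf={"spark.sql.shuffle.partitions": "200"},  # assumed Spark conf key
        compute="spark-pool",                          # assumed compute name
    )
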
class APCloudConfiguration(msrest.serialization.Model):
"""APCloudConfiguration.
:ivar referenced_ap_module_guid:
:vartype referenced_ap_module_guid: str
:ivar user_alias:
:vartype user_alias: str
:ivar aether_module_type:
:vartype aether_module_type: str
:ivar allow_overwrite:
:vartype allow_overwrite: bool
:ivar destination_expiration_days:
:vartype destination_expiration_days: int
:ivar should_respect_line_boundaries:
:vartype should_respect_line_boundaries: bool
"""
_attribute_map = {
'referenced_ap_module_guid': {'key': 'referencedAPModuleGuid', 'type': 'str'},
'user_alias': {'key': 'userAlias', 'type': 'str'},
'aether_module_type': {'key': 'aetherModuleType', 'type': 'str'},
'allow_overwrite': {'key': 'allowOverwrite', 'type': 'bool'},
'destination_expiration_days': {'key': 'destinationExpirationDays', 'type': 'int'},
'should_respect_line_boundaries': {'key': 'shouldRespectLineBoundaries', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword referenced_ap_module_guid:
:paramtype referenced_ap_module_guid: str
:keyword user_alias:
:paramtype user_alias: str
:keyword aether_module_type:
:paramtype aether_module_type: str
:keyword allow_overwrite:
:paramtype allow_overwrite: bool
:keyword destination_expiration_days:
:paramtype destination_expiration_days: int
:keyword should_respect_line_boundaries:
:paramtype should_respect_line_boundaries: bool
"""
super(APCloudConfiguration, self).__init__(**kwargs)
self.referenced_ap_module_guid = kwargs.get('referenced_ap_module_guid', None)
self.user_alias = kwargs.get('user_alias', None)
self.aether_module_type = kwargs.get('aether_module_type', None)
self.allow_overwrite = kwargs.get('allow_overwrite', None)
self.destination_expiration_days = kwargs.get('destination_expiration_days', None)
self.should_respect_line_boundaries = kwargs.get('should_respect_line_boundaries', None)
class ApiAndParameters(msrest.serialization.Model):
"""ApiAndParameters.
:ivar api:
:vartype api: str
    :ivar parameters: This is a dictionary mapping parameter names to tool setting parameters.
:vartype parameters: dict[str, ~flow.models.FlowToolSettingParameter]
:ivar default_prompt:
:vartype default_prompt: str
"""
_attribute_map = {
'api': {'key': 'api', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{FlowToolSettingParameter}'},
'default_prompt': {'key': 'default_prompt', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword api:
:paramtype api: str
        :keyword parameters: This is a dictionary mapping parameter names to tool setting
         parameters.
:paramtype parameters: dict[str, ~flow.models.FlowToolSettingParameter]
:keyword default_prompt:
:paramtype default_prompt: str
"""
super(ApiAndParameters, self).__init__(**kwargs)
self.api = kwargs.get('api', None)
self.parameters = kwargs.get('parameters', None)
self.default_prompt = kwargs.get('default_prompt', None)
class ApplicationEndpointConfiguration(msrest.serialization.Model):
"""ApplicationEndpointConfiguration.
:ivar type: Possible values include: "Jupyter", "JupyterLab", "SSH", "TensorBoard", "VSCode",
"Theia", "Grafana", "Custom", "RayDashboard".
:vartype type: str or ~flow.models.ApplicationEndpointType
:ivar port:
:vartype port: int
:ivar properties: Dictionary of :code:`<string>`.
:vartype properties: dict[str, str]
:ivar nodes:
:vartype nodes: ~flow.models.Nodes
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'port': {'key': 'port', 'type': 'int'},
'properties': {'key': 'properties', 'type': '{str}'},
'nodes': {'key': 'nodes', 'type': 'Nodes'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: Possible values include: "Jupyter", "JupyterLab", "SSH", "TensorBoard",
"VSCode", "Theia", "Grafana", "Custom", "RayDashboard".
:paramtype type: str or ~flow.models.ApplicationEndpointType
:keyword port:
:paramtype port: int
:keyword properties: Dictionary of :code:`<string>`.
:paramtype properties: dict[str, str]
:keyword nodes:
:paramtype nodes: ~flow.models.Nodes
"""
super(ApplicationEndpointConfiguration, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.port = kwargs.get('port', None)
self.properties = kwargs.get('properties', None)
self.nodes = kwargs.get('nodes', None)
class ArgumentAssignment(msrest.serialization.Model):
"""ArgumentAssignment.
:ivar value_type: Possible values include: "Literal", "Parameter", "Input", "Output",
"NestedList", "StringInterpolationList".
:vartype value_type: str or ~flow.models.ArgumentValueType
:ivar value:
:vartype value: str
:ivar nested_argument_list:
:vartype nested_argument_list: list[~flow.models.ArgumentAssignment]
:ivar string_interpolation_argument_list:
:vartype string_interpolation_argument_list: list[~flow.models.ArgumentAssignment]
"""
_attribute_map = {
'value_type': {'key': 'valueType', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
'nested_argument_list': {'key': 'nestedArgumentList', 'type': '[ArgumentAssignment]'},
'string_interpolation_argument_list': {'key': 'stringInterpolationArgumentList', 'type': '[ArgumentAssignment]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword value_type: Possible values include: "Literal", "Parameter", "Input", "Output",
"NestedList", "StringInterpolationList".
:paramtype value_type: str or ~flow.models.ArgumentValueType
:keyword value:
:paramtype value: str
:keyword nested_argument_list:
:paramtype nested_argument_list: list[~flow.models.ArgumentAssignment]
:keyword string_interpolation_argument_list:
:paramtype string_interpolation_argument_list: list[~flow.models.ArgumentAssignment]
"""
super(ArgumentAssignment, self).__init__(**kwargs)
self.value_type = kwargs.get('value_type', None)
self.value = kwargs.get('value', None)
self.nested_argument_list = kwargs.get('nested_argument_list', None)
self.string_interpolation_argument_list = kwargs.get('string_interpolation_argument_list', None)
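
# Editor-added sketch: ArgumentAssignment is recursive, so literals, parameter
# references, and interpolated strings can be combined. This builds the
# equivalent of the argument pair "--input <value of parameter 'data'>" using
# the documented ArgumentValueType values; the parameter name is made up.
def _example_argument_assignment():
    literal = ArgumentAssignment(value_type="Literal", value="--input")
    parameter = ArgumentAssignment(value_type="Parameter", value="data")
    return ArgumentAssignment(
        value_type="NestedList",
        nested_argument_list=[literal, parameter],
    )
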
class Asset(msrest.serialization.Model):
"""Asset.
:ivar asset_id:
:vartype asset_id: str
:ivar type:
:vartype type: str
"""
_attribute_map = {
'asset_id': {'key': 'assetId', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword asset_id:
:paramtype asset_id: str
:keyword type:
:paramtype type: str
"""
super(Asset, self).__init__(**kwargs)
self.asset_id = kwargs.get('asset_id', None)
self.type = kwargs.get('type', None)
class AssetDefinition(msrest.serialization.Model):
"""AssetDefinition.
:ivar path:
:vartype path: str
:ivar type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
"MLFlowModel", "TritonModel", "OpenAIModel".
:vartype type: str or ~flow.models.AEVAAssetType
:ivar asset_id:
:vartype asset_id: str
:ivar serialized_asset_id:
:vartype serialized_asset_id: str
"""
_attribute_map = {
'path': {'key': 'path', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'asset_id': {'key': 'assetId', 'type': 'str'},
'serialized_asset_id': {'key': 'serializedAssetId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword path:
:paramtype path: str
:keyword type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
"MLFlowModel", "TritonModel", "OpenAIModel".
:paramtype type: str or ~flow.models.AEVAAssetType
:keyword asset_id:
:paramtype asset_id: str
:keyword serialized_asset_id:
:paramtype serialized_asset_id: str
"""
super(AssetDefinition, self).__init__(**kwargs)
self.path = kwargs.get('path', None)
self.type = kwargs.get('type', None)
self.asset_id = kwargs.get('asset_id', None)
self.serialized_asset_id = kwargs.get('serialized_asset_id', None)
class AssetNameAndVersionIdentifier(msrest.serialization.Model):
"""AssetNameAndVersionIdentifier.
:ivar asset_name:
:vartype asset_name: str
:ivar version:
:vartype version: str
:ivar feed_name:
:vartype feed_name: str
"""
_attribute_map = {
'asset_name': {'key': 'assetName', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'feed_name': {'key': 'feedName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword asset_name:
:paramtype asset_name: str
:keyword version:
:paramtype version: str
:keyword feed_name:
:paramtype feed_name: str
"""
super(AssetNameAndVersionIdentifier, self).__init__(**kwargs)
self.asset_name = kwargs.get('asset_name', None)
self.version = kwargs.get('version', None)
self.feed_name = kwargs.get('feed_name', None)
class AssetOutputSettings(msrest.serialization.Model):
"""AssetOutputSettings.
:ivar path:
:vartype path: str
:ivar path_parameter_assignment:
:vartype path_parameter_assignment: ~flow.models.ParameterAssignment
:ivar type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
"MLFlowModel", "TritonModel", "OpenAIModel".
:vartype type: str or ~flow.models.AEVAAssetType
    :ivar options: This is a dictionary of additional output options (string key/value pairs).
:vartype options: dict[str, str]
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
"""
_attribute_map = {
'path': {'key': 'path', 'type': 'str'},
'path_parameter_assignment': {'key': 'PathParameterAssignment', 'type': 'ParameterAssignment'},
'type': {'key': 'type', 'type': 'str'},
'options': {'key': 'options', 'type': '{str}'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword path:
:paramtype path: str
:keyword path_parameter_assignment:
:paramtype path_parameter_assignment: ~flow.models.ParameterAssignment
:keyword type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
"MLFlowModel", "TritonModel", "OpenAIModel".
:paramtype type: str or ~flow.models.AEVAAssetType
        :keyword options: This is a dictionary of additional output options (string key/value pairs).
:paramtype options: dict[str, str]
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
"""
super(AssetOutputSettings, self).__init__(**kwargs)
self.path = kwargs.get('path', None)
self.path_parameter_assignment = kwargs.get('path_parameter_assignment', None)
self.type = kwargs.get('type', None)
self.options = kwargs.get('options', None)
self.data_store_mode = kwargs.get('data_store_mode', None)
self.name = kwargs.get('name', None)
self.version = kwargs.get('version', None)
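
# Editor-added sketch: asset output settings for a mounted folder output. The
# type and data_store_mode strings come from the enums documented above; the
# path and name are placeholders.
def _example_asset_output_settings():
    return AssetOutputSettings(
        path="azureml/outputs/processed",  # hypothetical datastore path
        type="UriFolder",
        data_store_mode="Mount",
        name="processed_data",
        version="1",
    )
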
class AssetOutputSettingsParameter(msrest.serialization.Model):
"""AssetOutputSettingsParameter.
:ivar name:
:vartype name: str
:ivar documentation:
:vartype documentation: str
:ivar default_value:
:vartype default_value: ~flow.models.AssetOutputSettings
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'documentation': {'key': 'documentation', 'type': 'str'},
'default_value': {'key': 'defaultValue', 'type': 'AssetOutputSettings'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword documentation:
:paramtype documentation: str
:keyword default_value:
:paramtype default_value: ~flow.models.AssetOutputSettings
"""
super(AssetOutputSettingsParameter, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.documentation = kwargs.get('documentation', None)
self.default_value = kwargs.get('default_value', None)
class AssetPublishResult(msrest.serialization.Model):
"""AssetPublishResult.
:ivar feed_name:
:vartype feed_name: str
:ivar asset_name:
:vartype asset_name: str
:ivar asset_version:
:vartype asset_version: str
:ivar step_name:
:vartype step_name: str
:ivar status:
:vartype status: str
:ivar error_message:
:vartype error_message: str
:ivar created_time:
:vartype created_time: ~datetime.datetime
:ivar last_updated_time:
:vartype last_updated_time: ~datetime.datetime
:ivar regional_publish_results: Dictionary of :code:`<AssetPublishSingleRegionResult>`.
:vartype regional_publish_results: dict[str, ~flow.models.AssetPublishSingleRegionResult]
"""
_attribute_map = {
'feed_name': {'key': 'feedName', 'type': 'str'},
'asset_name': {'key': 'assetName', 'type': 'str'},
'asset_version': {'key': 'assetVersion', 'type': 'str'},
'step_name': {'key': 'stepName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'error_message': {'key': 'errorMessage', 'type': 'str'},
'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
'last_updated_time': {'key': 'lastUpdatedTime', 'type': 'iso-8601'},
'regional_publish_results': {'key': 'regionalPublishResults', 'type': '{AssetPublishSingleRegionResult}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword feed_name:
:paramtype feed_name: str
:keyword asset_name:
:paramtype asset_name: str
:keyword asset_version:
:paramtype asset_version: str
:keyword step_name:
:paramtype step_name: str
:keyword status:
:paramtype status: str
:keyword error_message:
:paramtype error_message: str
:keyword created_time:
:paramtype created_time: ~datetime.datetime
:keyword last_updated_time:
:paramtype last_updated_time: ~datetime.datetime
:keyword regional_publish_results: Dictionary of :code:`<AssetPublishSingleRegionResult>`.
:paramtype regional_publish_results: dict[str, ~flow.models.AssetPublishSingleRegionResult]
"""
super(AssetPublishResult, self).__init__(**kwargs)
self.feed_name = kwargs.get('feed_name', None)
self.asset_name = kwargs.get('asset_name', None)
self.asset_version = kwargs.get('asset_version', None)
self.step_name = kwargs.get('step_name', None)
self.status = kwargs.get('status', None)
self.error_message = kwargs.get('error_message', None)
self.created_time = kwargs.get('created_time', None)
self.last_updated_time = kwargs.get('last_updated_time', None)
self.regional_publish_results = kwargs.get('regional_publish_results', None)
class AssetPublishSingleRegionResult(msrest.serialization.Model):
"""AssetPublishSingleRegionResult.
:ivar step_name:
:vartype step_name: str
:ivar status:
:vartype status: str
:ivar error_message:
:vartype error_message: str
:ivar last_updated_time:
:vartype last_updated_time: ~datetime.datetime
:ivar total_steps:
:vartype total_steps: int
:ivar finished_steps:
:vartype finished_steps: int
:ivar remaining_steps:
:vartype remaining_steps: int
"""
_attribute_map = {
'step_name': {'key': 'stepName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'error_message': {'key': 'errorMessage', 'type': 'str'},
'last_updated_time': {'key': 'lastUpdatedTime', 'type': 'iso-8601'},
'total_steps': {'key': 'totalSteps', 'type': 'int'},
'finished_steps': {'key': 'finishedSteps', 'type': 'int'},
'remaining_steps': {'key': 'remainingSteps', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword step_name:
:paramtype step_name: str
:keyword status:
:paramtype status: str
:keyword error_message:
:paramtype error_message: str
:keyword last_updated_time:
:paramtype last_updated_time: ~datetime.datetime
:keyword total_steps:
:paramtype total_steps: int
:keyword finished_steps:
:paramtype finished_steps: int
:keyword remaining_steps:
:paramtype remaining_steps: int
"""
super(AssetPublishSingleRegionResult, self).__init__(**kwargs)
self.step_name = kwargs.get('step_name', None)
self.status = kwargs.get('status', None)
self.error_message = kwargs.get('error_message', None)
self.last_updated_time = kwargs.get('last_updated_time', None)
self.total_steps = kwargs.get('total_steps', None)
self.finished_steps = kwargs.get('finished_steps', None)
self.remaining_steps = kwargs.get('remaining_steps', None)
class AssetTypeMetaInfo(msrest.serialization.Model):
"""AssetTypeMetaInfo.
:ivar consumption_mode: Possible values include: "Reference", "Copy", "CopyAndAutoUpgrade".
:vartype consumption_mode: str or ~flow.models.ConsumeMode
"""
_attribute_map = {
'consumption_mode': {'key': 'consumptionMode', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword consumption_mode: Possible values include: "Reference", "Copy", "CopyAndAutoUpgrade".
:paramtype consumption_mode: str or ~flow.models.ConsumeMode
"""
super(AssetTypeMetaInfo, self).__init__(**kwargs)
self.consumption_mode = kwargs.get('consumption_mode', None)
class AssetVersionPublishRequest(msrest.serialization.Model):
"""AssetVersionPublishRequest.
:ivar asset_type: Possible values include: "Component", "Model", "Environment", "Dataset",
"DataStore", "SampleGraph", "FlowTool", "FlowToolSetting", "FlowConnection", "FlowSample",
"FlowRuntimeSpec".
:vartype asset_type: str or ~flow.models.AssetType
:ivar asset_source_type: Possible values include: "Unknown", "Local", "GithubFile",
"GithubFolder", "DevopsArtifactsZip".
:vartype asset_source_type: str or ~flow.models.AssetSourceType
:ivar yaml_file:
:vartype yaml_file: str
:ivar source_zip_url:
:vartype source_zip_url: str
:ivar source_zip_file:
:vartype source_zip_file: IO
:ivar feed_name:
:vartype feed_name: str
:ivar set_as_default_version:
:vartype set_as_default_version: bool
:ivar referenced_assets:
:vartype referenced_assets: list[~flow.models.AssetNameAndVersionIdentifier]
:ivar flow_file:
:vartype flow_file: str
:ivar version:
:vartype version: str
"""
_attribute_map = {
'asset_type': {'key': 'assetType', 'type': 'str'},
'asset_source_type': {'key': 'assetSourceType', 'type': 'str'},
'yaml_file': {'key': 'yamlFile', 'type': 'str'},
'source_zip_url': {'key': 'sourceZipUrl', 'type': 'str'},
'source_zip_file': {'key': 'sourceZipFile', 'type': 'IO'},
'feed_name': {'key': 'feedName', 'type': 'str'},
'set_as_default_version': {'key': 'setAsDefaultVersion', 'type': 'bool'},
'referenced_assets': {'key': 'referencedAssets', 'type': '[AssetNameAndVersionIdentifier]'},
'flow_file': {'key': 'flowFile', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword asset_type: Possible values include: "Component", "Model", "Environment", "Dataset",
"DataStore", "SampleGraph", "FlowTool", "FlowToolSetting", "FlowConnection", "FlowSample",
"FlowRuntimeSpec".
:paramtype asset_type: str or ~flow.models.AssetType
:keyword asset_source_type: Possible values include: "Unknown", "Local", "GithubFile",
"GithubFolder", "DevopsArtifactsZip".
:paramtype asset_source_type: str or ~flow.models.AssetSourceType
:keyword yaml_file:
:paramtype yaml_file: str
:keyword source_zip_url:
:paramtype source_zip_url: str
:keyword source_zip_file:
:paramtype source_zip_file: IO
:keyword feed_name:
:paramtype feed_name: str
:keyword set_as_default_version:
:paramtype set_as_default_version: bool
:keyword referenced_assets:
:paramtype referenced_assets: list[~flow.models.AssetNameAndVersionIdentifier]
:keyword flow_file:
:paramtype flow_file: str
:keyword version:
:paramtype version: str
"""
super(AssetVersionPublishRequest, self).__init__(**kwargs)
self.asset_type = kwargs.get('asset_type', None)
self.asset_source_type = kwargs.get('asset_source_type', None)
self.yaml_file = kwargs.get('yaml_file', None)
self.source_zip_url = kwargs.get('source_zip_url', None)
self.source_zip_file = kwargs.get('source_zip_file', None)
self.feed_name = kwargs.get('feed_name', None)
self.set_as_default_version = kwargs.get('set_as_default_version', None)
self.referenced_assets = kwargs.get('referenced_assets', None)
self.flow_file = kwargs.get('flow_file', None)
self.version = kwargs.get('version', None)
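
# Editor-added sketch: publishing a flow tool asset from a local YAML file.
# The feed and file names are placeholders; asset_type and asset_source_type
# use values enumerated in the docstring above.
def _example_asset_version_publish_request():
    return AssetVersionPublishRequest(
        asset_type="FlowTool",
        asset_source_type="Local",
        yaml_file="tool.yaml",  # hypothetical file name
        feed_name="my-feed",    # hypothetical feed
        set_as_default_version=True,
        version="0.0.1",
    )
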
class AssignedUser(msrest.serialization.Model):
"""AssignedUser.
:ivar object_id:
:vartype object_id: str
:ivar tenant_id:
:vartype tenant_id: str
"""
_attribute_map = {
'object_id': {'key': 'objectId', 'type': 'str'},
'tenant_id': {'key': 'tenantId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword object_id:
:paramtype object_id: str
:keyword tenant_id:
:paramtype tenant_id: str
"""
super(AssignedUser, self).__init__(**kwargs)
self.object_id = kwargs.get('object_id', None)
self.tenant_id = kwargs.get('tenant_id', None)
class AttachCosmosRequest(msrest.serialization.Model):
"""AttachCosmosRequest.
:ivar account_endpoint:
:vartype account_endpoint: str
:ivar resource_arm_id:
:vartype resource_arm_id: str
:ivar database_name:
:vartype database_name: str
"""
_attribute_map = {
'account_endpoint': {'key': 'accountEndpoint', 'type': 'str'},
'resource_arm_id': {'key': 'resourceArmId', 'type': 'str'},
'database_name': {'key': 'databaseName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword account_endpoint:
:paramtype account_endpoint: str
:keyword resource_arm_id:
:paramtype resource_arm_id: str
:keyword database_name:
:paramtype database_name: str
"""
super(AttachCosmosRequest, self).__init__(**kwargs)
self.account_endpoint = kwargs.get('account_endpoint', None)
self.resource_arm_id = kwargs.get('resource_arm_id', None)
self.database_name = kwargs.get('database_name', None)
class AuthKeys(msrest.serialization.Model):
"""AuthKeys.
:ivar primary_key:
:vartype primary_key: str
:ivar secondary_key:
:vartype secondary_key: str
"""
_attribute_map = {
'primary_key': {'key': 'primaryKey', 'type': 'str'},
'secondary_key': {'key': 'secondaryKey', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword primary_key:
:paramtype primary_key: str
:keyword secondary_key:
:paramtype secondary_key: str
"""
super(AuthKeys, self).__init__(**kwargs)
self.primary_key = kwargs.get('primary_key', None)
self.secondary_key = kwargs.get('secondary_key', None)
class AutoClusterComputeSpecification(msrest.serialization.Model):
"""AutoClusterComputeSpecification.
:ivar instance_size:
:vartype instance_size: str
:ivar instance_priority:
:vartype instance_priority: str
:ivar os_type:
:vartype os_type: str
:ivar location:
:vartype location: str
:ivar runtime_version:
:vartype runtime_version: str
:ivar quota_enforcement_resource_id:
:vartype quota_enforcement_resource_id: str
:ivar model_compute_specification_id:
:vartype model_compute_specification_id: str
"""
_attribute_map = {
'instance_size': {'key': 'instanceSize', 'type': 'str'},
'instance_priority': {'key': 'instancePriority', 'type': 'str'},
'os_type': {'key': 'osType', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'runtime_version': {'key': 'runtimeVersion', 'type': 'str'},
'quota_enforcement_resource_id': {'key': 'quotaEnforcementResourceId', 'type': 'str'},
'model_compute_specification_id': {'key': 'modelComputeSpecificationId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword instance_size:
:paramtype instance_size: str
:keyword instance_priority:
:paramtype instance_priority: str
:keyword os_type:
:paramtype os_type: str
:keyword location:
:paramtype location: str
:keyword runtime_version:
:paramtype runtime_version: str
:keyword quota_enforcement_resource_id:
:paramtype quota_enforcement_resource_id: str
:keyword model_compute_specification_id:
:paramtype model_compute_specification_id: str
"""
super(AutoClusterComputeSpecification, self).__init__(**kwargs)
self.instance_size = kwargs.get('instance_size', None)
self.instance_priority = kwargs.get('instance_priority', None)
self.os_type = kwargs.get('os_type', None)
self.location = kwargs.get('location', None)
self.runtime_version = kwargs.get('runtime_version', None)
self.quota_enforcement_resource_id = kwargs.get('quota_enforcement_resource_id', None)
self.model_compute_specification_id = kwargs.get('model_compute_specification_id', None)
class AutoDeleteSetting(msrest.serialization.Model):
"""AutoDeleteSetting.
:ivar condition: Possible values include: "CreatedGreaterThan", "LastAccessedGreaterThan".
:vartype condition: str or ~flow.models.AutoDeleteCondition
:ivar value:
:vartype value: str
"""
_attribute_map = {
'condition': {'key': 'condition', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword condition: Possible values include: "CreatedGreaterThan", "LastAccessedGreaterThan".
:paramtype condition: str or ~flow.models.AutoDeleteCondition
:keyword value:
:paramtype value: str
"""
super(AutoDeleteSetting, self).__init__(**kwargs)
self.condition = kwargs.get('condition', None)
self.value = kwargs.get('value', None)
class AutoFeaturizeConfiguration(msrest.serialization.Model):
"""AutoFeaturizeConfiguration.
:ivar featurization_config:
:vartype featurization_config: ~flow.models.FeaturizationSettings
"""
_attribute_map = {
'featurization_config': {'key': 'featurizationConfig', 'type': 'FeaturizationSettings'},
}
def __init__(
self,
**kwargs
):
"""
:keyword featurization_config:
:paramtype featurization_config: ~flow.models.FeaturizationSettings
"""
super(AutoFeaturizeConfiguration, self).__init__(**kwargs)
self.featurization_config = kwargs.get('featurization_config', None)
class AutologgerSettings(msrest.serialization.Model):
"""AutologgerSettings.
:ivar ml_flow_autologger: Possible values include: "Enabled", "Disabled".
:vartype ml_flow_autologger: str or ~flow.models.MLFlowAutologgerState
"""
_attribute_map = {
'ml_flow_autologger': {'key': 'mlFlowAutologger', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword ml_flow_autologger: Possible values include: "Enabled", "Disabled".
:paramtype ml_flow_autologger: str or ~flow.models.MLFlowAutologgerState
"""
super(AutologgerSettings, self).__init__(**kwargs)
self.ml_flow_autologger = kwargs.get('ml_flow_autologger', None)
class AutoMLComponentConfiguration(msrest.serialization.Model):
"""AutoMLComponentConfiguration.
:ivar auto_train_config:
:vartype auto_train_config: ~flow.models.AutoTrainConfiguration
:ivar auto_featurize_config:
:vartype auto_featurize_config: ~flow.models.AutoFeaturizeConfiguration
"""
_attribute_map = {
'auto_train_config': {'key': 'autoTrainConfig', 'type': 'AutoTrainConfiguration'},
'auto_featurize_config': {'key': 'autoFeaturizeConfig', 'type': 'AutoFeaturizeConfiguration'},
}
def __init__(
self,
**kwargs
):
"""
:keyword auto_train_config:
:paramtype auto_train_config: ~flow.models.AutoTrainConfiguration
:keyword auto_featurize_config:
:paramtype auto_featurize_config: ~flow.models.AutoFeaturizeConfiguration
"""
super(AutoMLComponentConfiguration, self).__init__(**kwargs)
self.auto_train_config = kwargs.get('auto_train_config', None)
self.auto_featurize_config = kwargs.get('auto_featurize_config', None)
class AutoScaler(msrest.serialization.Model):
"""AutoScaler.
:ivar autoscale_enabled:
:vartype autoscale_enabled: bool
:ivar min_replicas:
:vartype min_replicas: int
:ivar max_replicas:
:vartype max_replicas: int
:ivar target_utilization:
:vartype target_utilization: int
:ivar refresh_period_in_seconds:
:vartype refresh_period_in_seconds: int
"""
_attribute_map = {
'autoscale_enabled': {'key': 'autoscaleEnabled', 'type': 'bool'},
'min_replicas': {'key': 'minReplicas', 'type': 'int'},
'max_replicas': {'key': 'maxReplicas', 'type': 'int'},
'target_utilization': {'key': 'targetUtilization', 'type': 'int'},
'refresh_period_in_seconds': {'key': 'refreshPeriodInSeconds', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword autoscale_enabled:
:paramtype autoscale_enabled: bool
:keyword min_replicas:
:paramtype min_replicas: int
:keyword max_replicas:
:paramtype max_replicas: int
:keyword target_utilization:
:paramtype target_utilization: int
:keyword refresh_period_in_seconds:
:paramtype refresh_period_in_seconds: int
"""
super(AutoScaler, self).__init__(**kwargs)
self.autoscale_enabled = kwargs.get('autoscale_enabled', None)
self.min_replicas = kwargs.get('min_replicas', None)
self.max_replicas = kwargs.get('max_replicas', None)
self.target_utilization = kwargs.get('target_utilization', None)
self.refresh_period_in_seconds = kwargs.get('refresh_period_in_seconds', None)
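
# Editor-added sketch combining AutoScaler with AKSAdvanceSettings (defined
# earlier in this module): autoscale between 1 and 5 replicas, refreshing
# every 30 seconds. The numbers are illustrative defaults, not
# service-mandated, and target_utilization is assumed to be a percentage.
def _example_aks_advance_settings():
    scaler = AutoScaler(
        autoscale_enabled=True,
        min_replicas=1,
        max_replicas=5,
        target_utilization=70,
        refresh_period_in_seconds=30,
    )
    return AKSAdvanceSettings(
        auto_scaler=scaler,
        app_insights_enabled=True,
        scoring_timeout_ms=60000,
        num_replicas=1,
    )
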
class AutoTrainConfiguration(msrest.serialization.Model):
"""AutoTrainConfiguration.
:ivar general_settings:
:vartype general_settings: ~flow.models.GeneralSettings
:ivar limit_settings:
:vartype limit_settings: ~flow.models.LimitSettings
:ivar data_settings:
:vartype data_settings: ~flow.models.DataSettings
:ivar forecasting_settings:
:vartype forecasting_settings: ~flow.models.ForecastingSettings
:ivar training_settings:
:vartype training_settings: ~flow.models.TrainingSettings
:ivar sweep_settings:
:vartype sweep_settings: ~flow.models.SweepSettings
:ivar image_model_settings: Dictionary of :code:`<any>`.
:vartype image_model_settings: dict[str, any]
:ivar properties: Dictionary of :code:`<string>`.
:vartype properties: dict[str, str]
:ivar compute_configuration:
:vartype compute_configuration: ~flow.models.AEVAComputeConfiguration
    :ivar resource_configurtion: Resource configuration; spelling matches the service contract.
:vartype resource_configurtion: ~flow.models.AEVAResourceConfiguration
:ivar environment_id:
:vartype environment_id: str
:ivar environment_variables: Dictionary of :code:`<string>`.
:vartype environment_variables: dict[str, str]
"""
_attribute_map = {
'general_settings': {'key': 'generalSettings', 'type': 'GeneralSettings'},
'limit_settings': {'key': 'limitSettings', 'type': 'LimitSettings'},
'data_settings': {'key': 'dataSettings', 'type': 'DataSettings'},
'forecasting_settings': {'key': 'forecastingSettings', 'type': 'ForecastingSettings'},
'training_settings': {'key': 'trainingSettings', 'type': 'TrainingSettings'},
'sweep_settings': {'key': 'sweepSettings', 'type': 'SweepSettings'},
'image_model_settings': {'key': 'imageModelSettings', 'type': '{object}'},
'properties': {'key': 'properties', 'type': '{str}'},
'compute_configuration': {'key': 'computeConfiguration', 'type': 'AEVAComputeConfiguration'},
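        # NOTE: 'resourceConfigurtion' (sic) matches the service wire contract;
        # correcting the spelling here would break (de)serialization.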
        # Note: the wire key below keeps the service contract's misspelling.
        'resource_configuration': {'key': 'resourceConfigurtion', 'type': 'AEVAResourceConfiguration'},
'environment_id': {'key': 'environmentId', 'type': 'str'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword general_settings:
:paramtype general_settings: ~flow.models.GeneralSettings
:keyword limit_settings:
:paramtype limit_settings: ~flow.models.LimitSettings
:keyword data_settings:
:paramtype data_settings: ~flow.models.DataSettings
:keyword forecasting_settings:
:paramtype forecasting_settings: ~flow.models.ForecastingSettings
:keyword training_settings:
:paramtype training_settings: ~flow.models.TrainingSettings
:keyword sweep_settings:
:paramtype sweep_settings: ~flow.models.SweepSettings
:keyword image_model_settings: Dictionary of :code:`<any>`.
:paramtype image_model_settings: dict[str, any]
:keyword properties: Dictionary of :code:`<string>`.
:paramtype properties: dict[str, str]
:keyword compute_configuration:
:paramtype compute_configuration: ~flow.models.AEVAComputeConfiguration
        :keyword resource_configuration:
        :paramtype resource_configuration: ~flow.models.AEVAResourceConfiguration
:keyword environment_id:
:paramtype environment_id: str
:keyword environment_variables: Dictionary of :code:`<string>`.
:paramtype environment_variables: dict[str, str]
"""
super(AutoTrainConfiguration, self).__init__(**kwargs)
self.general_settings = kwargs.get('general_settings', None)
self.limit_settings = kwargs.get('limit_settings', None)
self.data_settings = kwargs.get('data_settings', None)
self.forecasting_settings = kwargs.get('forecasting_settings', None)
self.training_settings = kwargs.get('training_settings', None)
self.sweep_settings = kwargs.get('sweep_settings', None)
self.image_model_settings = kwargs.get('image_model_settings', None)
self.properties = kwargs.get('properties', None)
self.compute_configuration = kwargs.get('compute_configuration', None)
        self.resource_configuration = kwargs.get('resource_configuration', None)
self.environment_id = kwargs.get('environment_id', None)
self.environment_variables = kwargs.get('environment_variables', None)
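
# Illustrative only (not generated): AutoMLComponentConfiguration (defined
# above) composes this model, so nested settings are passed as model
# instances. GeneralSettings, LimitSettings, etc. live elsewhere in
# ~flow.models; this sketch omits them and sets only scalar fields, and the
# environment id shown is hypothetical.
def _example_auto_ml_component_configuration():
    train_config = AutoTrainConfiguration(
        environment_id="azureml:example-environment:1",
        environment_variables={"EXAMPLE_FLAG": "1"},
        properties={"source": "docs-example"},
    )
    return AutoMLComponentConfiguration(auto_train_config=train_config)
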
class AvailabilityResponse(msrest.serialization.Model):
"""AvailabilityResponse.
:ivar is_available:
:vartype is_available: bool
:ivar error: The error response.
:vartype error: ~flow.models.ErrorResponse
"""
_attribute_map = {
'is_available': {'key': 'isAvailable', 'type': 'bool'},
'error': {'key': 'error', 'type': 'ErrorResponse'},
}
def __init__(
self,
**kwargs
):
"""
:keyword is_available:
:paramtype is_available: bool
:keyword error: The error response.
:paramtype error: ~flow.models.ErrorResponse
"""
super(AvailabilityResponse, self).__init__(**kwargs)
self.is_available = kwargs.get('is_available', None)
self.error = kwargs.get('error', None)
class AzureBlobReference(msrest.serialization.Model):
"""AzureBlobReference.
:ivar container:
:vartype container: str
:ivar sas_token:
:vartype sas_token: str
:ivar uri:
:vartype uri: str
:ivar account:
:vartype account: str
:ivar relative_path:
:vartype relative_path: str
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'container': {'key': 'container', 'type': 'str'},
'sas_token': {'key': 'sasToken', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'account': {'key': 'account', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword container:
:paramtype container: str
:keyword sas_token:
:paramtype sas_token: str
:keyword uri:
:paramtype uri: str
:keyword account:
:paramtype account: str
:keyword relative_path:
:paramtype relative_path: str
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AzureBlobReference, self).__init__(**kwargs)
self.container = kwargs.get('container', None)
self.sas_token = kwargs.get('sas_token', None)
self.uri = kwargs.get('uri', None)
self.account = kwargs.get('account', None)
self.relative_path = kwargs.get('relative_path', None)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
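
# Illustrative only (not generated): as_dict() (inherited from
# msrest.serialization.Model) returns a plain dict keyed by the Python
# attribute names, whereas serialize() uses the camelCase wire keys.
# The storage account, container, and path below are hypothetical.
def _example_azure_blob_reference_dict():
    ref = AzureBlobReference(
        container="training-data",
        account="examplestorageacct",
        relative_path="datasets/iris.csv",
        aml_data_store_name="workspaceblobstore",
    )
    return ref.as_dict()  # e.g. {'container': 'training-data', ...}
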
class AzureDatabaseReference(msrest.serialization.Model):
"""AzureDatabaseReference.
:ivar table_name:
:vartype table_name: str
:ivar sql_query:
:vartype sql_query: str
:ivar stored_procedure_name:
:vartype stored_procedure_name: str
:ivar stored_procedure_parameters:
:vartype stored_procedure_parameters: list[~flow.models.StoredProcedureParameter]
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'table_name': {'key': 'tableName', 'type': 'str'},
'sql_query': {'key': 'sqlQuery', 'type': 'str'},
'stored_procedure_name': {'key': 'storedProcedureName', 'type': 'str'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '[StoredProcedureParameter]'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword table_name:
:paramtype table_name: str
:keyword sql_query:
:paramtype sql_query: str
:keyword stored_procedure_name:
:paramtype stored_procedure_name: str
:keyword stored_procedure_parameters:
:paramtype stored_procedure_parameters: list[~flow.models.StoredProcedureParameter]
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AzureDatabaseReference, self).__init__(**kwargs)
self.table_name = kwargs.get('table_name', None)
self.sql_query = kwargs.get('sql_query', None)
self.stored_procedure_name = kwargs.get('stored_procedure_name', None)
self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
class AzureDataLakeGen2Reference(msrest.serialization.Model):
"""AzureDataLakeGen2Reference.
:ivar file_system_name:
:vartype file_system_name: str
:ivar uri:
:vartype uri: str
:ivar account:
:vartype account: str
:ivar relative_path:
:vartype relative_path: str
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'file_system_name': {'key': 'fileSystemName', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'account': {'key': 'account', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword file_system_name:
:paramtype file_system_name: str
:keyword uri:
:paramtype uri: str
:keyword account:
:paramtype account: str
:keyword relative_path:
:paramtype relative_path: str
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AzureDataLakeGen2Reference, self).__init__(**kwargs)
self.file_system_name = kwargs.get('file_system_name', None)
self.uri = kwargs.get('uri', None)
self.account = kwargs.get('account', None)
self.relative_path = kwargs.get('relative_path', None)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
class AzureDataLakeReference(msrest.serialization.Model):
"""AzureDataLakeReference.
:ivar tenant:
:vartype tenant: str
:ivar subscription:
:vartype subscription: str
:ivar resource_group:
:vartype resource_group: str
:ivar uri:
:vartype uri: str
:ivar account:
:vartype account: str
:ivar relative_path:
:vartype relative_path: str
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'tenant': {'key': 'tenant', 'type': 'str'},
'subscription': {'key': 'subscription', 'type': 'str'},
'resource_group': {'key': 'resourceGroup', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'account': {'key': 'account', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword tenant:
:paramtype tenant: str
:keyword subscription:
:paramtype subscription: str
:keyword resource_group:
:paramtype resource_group: str
:keyword uri:
:paramtype uri: str
:keyword account:
:paramtype account: str
:keyword relative_path:
:paramtype relative_path: str
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AzureDataLakeReference, self).__init__(**kwargs)
self.tenant = kwargs.get('tenant', None)
self.subscription = kwargs.get('subscription', None)
self.resource_group = kwargs.get('resource_group', None)
self.uri = kwargs.get('uri', None)
self.account = kwargs.get('account', None)
self.relative_path = kwargs.get('relative_path', None)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
class AzureFilesReference(msrest.serialization.Model):
"""AzureFilesReference.
:ivar share:
:vartype share: str
:ivar uri:
:vartype uri: str
:ivar account:
:vartype account: str
:ivar relative_path:
:vartype relative_path: str
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'share': {'key': 'share', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'account': {'key': 'account', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword share:
:paramtype share: str
:keyword uri:
:paramtype uri: str
:keyword account:
:paramtype account: str
:keyword relative_path:
:paramtype relative_path: str
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AzureFilesReference, self).__init__(**kwargs)
self.share = kwargs.get('share', None)
self.uri = kwargs.get('uri', None)
self.account = kwargs.get('account', None)
self.relative_path = kwargs.get('relative_path', None)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
class AzureMLModuleVersionDescriptor(msrest.serialization.Model):
"""AzureMLModuleVersionDescriptor.
:ivar module_version_id:
:vartype module_version_id: str
:ivar version:
:vartype version: str
"""
_attribute_map = {
'module_version_id': {'key': 'moduleVersionId', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword module_version_id:
:paramtype module_version_id: str
:keyword version:
:paramtype version: str
"""
super(AzureMLModuleVersionDescriptor, self).__init__(**kwargs)
self.module_version_id = kwargs.get('module_version_id', None)
self.version = kwargs.get('version', None)
class AzureOpenAIDeploymentDto(msrest.serialization.Model):
"""AzureOpenAIDeploymentDto.
:ivar name:
:vartype name: str
:ivar model_name:
:vartype model_name: str
:ivar capabilities:
:vartype capabilities: ~flow.models.AzureOpenAIModelCapabilities
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'model_name': {'key': 'modelName', 'type': 'str'},
'capabilities': {'key': 'capabilities', 'type': 'AzureOpenAIModelCapabilities'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword model_name:
:paramtype model_name: str
:keyword capabilities:
:paramtype capabilities: ~flow.models.AzureOpenAIModelCapabilities
"""
super(AzureOpenAIDeploymentDto, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.model_name = kwargs.get('model_name', None)
self.capabilities = kwargs.get('capabilities', None)
class AzureOpenAIModelCapabilities(msrest.serialization.Model):
"""AzureOpenAIModelCapabilities.
:ivar completion:
:vartype completion: bool
:ivar chat_completion:
:vartype chat_completion: bool
:ivar embeddings:
:vartype embeddings: bool
"""
_attribute_map = {
'completion': {'key': 'completion', 'type': 'bool'},
'chat_completion': {'key': 'chat_completion', 'type': 'bool'},
'embeddings': {'key': 'embeddings', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword completion:
:paramtype completion: bool
:keyword chat_completion:
:paramtype chat_completion: bool
:keyword embeddings:
:paramtype embeddings: bool
"""
super(AzureOpenAIModelCapabilities, self).__init__(**kwargs)
self.completion = kwargs.get('completion', None)
self.chat_completion = kwargs.get('chat_completion', None)
self.embeddings = kwargs.get('embeddings', None)
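
# Illustrative only (not generated): AzureOpenAIDeploymentDto (defined above)
# nests this capabilities model; the deployment and model names below are
# hypothetical.
def _example_azure_openai_deployment():
    return AzureOpenAIDeploymentDto(
        name="gpt-35-turbo-deployment",
        model_name="gpt-35-turbo",
        capabilities=AzureOpenAIModelCapabilities(
            completion=True,
            chat_completion=True,
            embeddings=False,
        ),
    )
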
class BatchAiComputeInfo(msrest.serialization.Model):
"""BatchAiComputeInfo.
:ivar batch_ai_subscription_id:
:vartype batch_ai_subscription_id: str
:ivar batch_ai_resource_group:
:vartype batch_ai_resource_group: str
:ivar batch_ai_workspace_name:
:vartype batch_ai_workspace_name: str
:ivar cluster_name:
:vartype cluster_name: str
:ivar native_shared_directory:
:vartype native_shared_directory: str
"""
_attribute_map = {
'batch_ai_subscription_id': {'key': 'batchAiSubscriptionId', 'type': 'str'},
'batch_ai_resource_group': {'key': 'batchAiResourceGroup', 'type': 'str'},
'batch_ai_workspace_name': {'key': 'batchAiWorkspaceName', 'type': 'str'},
'cluster_name': {'key': 'clusterName', 'type': 'str'},
'native_shared_directory': {'key': 'nativeSharedDirectory', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword batch_ai_subscription_id:
:paramtype batch_ai_subscription_id: str
:keyword batch_ai_resource_group:
:paramtype batch_ai_resource_group: str
:keyword batch_ai_workspace_name:
:paramtype batch_ai_workspace_name: str
:keyword cluster_name:
:paramtype cluster_name: str
:keyword native_shared_directory:
:paramtype native_shared_directory: str
"""
super(BatchAiComputeInfo, self).__init__(**kwargs)
self.batch_ai_subscription_id = kwargs.get('batch_ai_subscription_id', None)
self.batch_ai_resource_group = kwargs.get('batch_ai_resource_group', None)
self.batch_ai_workspace_name = kwargs.get('batch_ai_workspace_name', None)
self.cluster_name = kwargs.get('cluster_name', None)
self.native_shared_directory = kwargs.get('native_shared_directory', None)
class BatchDataInput(msrest.serialization.Model):
"""BatchDataInput.
:ivar data_uri:
:vartype data_uri: str
:ivar type:
:vartype type: str
"""
_attribute_map = {
'data_uri': {'key': 'dataUri', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_uri:
:paramtype data_uri: str
:keyword type:
:paramtype type: str
"""
super(BatchDataInput, self).__init__(**kwargs)
self.data_uri = kwargs.get('data_uri', None)
self.type = kwargs.get('type', None)
class BatchExportComponentSpecResponse(msrest.serialization.Model):
"""BatchExportComponentSpecResponse.
:ivar component_spec_meta_infos:
:vartype component_spec_meta_infos: list[~flow.models.ComponentSpecMetaInfo]
:ivar errors:
:vartype errors: list[~flow.models.ErrorResponse]
"""
_attribute_map = {
'component_spec_meta_infos': {'key': 'componentSpecMetaInfos', 'type': '[ComponentSpecMetaInfo]'},
'errors': {'key': 'errors', 'type': '[ErrorResponse]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword component_spec_meta_infos:
:paramtype component_spec_meta_infos: list[~flow.models.ComponentSpecMetaInfo]
:keyword errors:
:paramtype errors: list[~flow.models.ErrorResponse]
"""
super(BatchExportComponentSpecResponse, self).__init__(**kwargs)
self.component_spec_meta_infos = kwargs.get('component_spec_meta_infos', None)
self.errors = kwargs.get('errors', None)
class BatchExportRawComponentResponse(msrest.serialization.Model):
"""BatchExportRawComponentResponse.
:ivar raw_component_dtos:
:vartype raw_component_dtos: list[~flow.models.RawComponentDto]
:ivar errors:
:vartype errors: list[~flow.models.ErrorResponse]
"""
_attribute_map = {
'raw_component_dtos': {'key': 'rawComponentDtos', 'type': '[RawComponentDto]'},
'errors': {'key': 'errors', 'type': '[ErrorResponse]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword raw_component_dtos:
:paramtype raw_component_dtos: list[~flow.models.RawComponentDto]
:keyword errors:
:paramtype errors: list[~flow.models.ErrorResponse]
"""
super(BatchExportRawComponentResponse, self).__init__(**kwargs)
self.raw_component_dtos = kwargs.get('raw_component_dtos', None)
self.errors = kwargs.get('errors', None)
class BatchGetComponentHashesRequest(msrest.serialization.Model):
"""BatchGetComponentHashesRequest.
:ivar module_hash_version: Possible values include: "IdentifierHash", "IdentifierHashV2".
:vartype module_hash_version: str or ~flow.models.AetherModuleHashVersion
:ivar module_entities: Dictionary of :code:`<AetherModuleEntity>`.
:vartype module_entities: dict[str, ~flow.models.AetherModuleEntity]
"""
_attribute_map = {
'module_hash_version': {'key': 'moduleHashVersion', 'type': 'str'},
'module_entities': {'key': 'moduleEntities', 'type': '{AetherModuleEntity}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword module_hash_version: Possible values include: "IdentifierHash", "IdentifierHashV2".
:paramtype module_hash_version: str or ~flow.models.AetherModuleHashVersion
:keyword module_entities: Dictionary of :code:`<AetherModuleEntity>`.
:paramtype module_entities: dict[str, ~flow.models.AetherModuleEntity]
"""
super(BatchGetComponentHashesRequest, self).__init__(**kwargs)
self.module_hash_version = kwargs.get('module_hash_version', None)
self.module_entities = kwargs.get('module_entities', None)
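
# Illustrative only (not generated): module_entities is a dictionary of
# AetherModuleEntity models (defined elsewhere in this module) keyed by an
# arbitrary string id. The entity is taken as a parameter here rather than
# constructed, to avoid assuming its fields; the hash version is one of the
# documented values.
def _example_batch_get_component_hashes_request(module_entity):
    return BatchGetComponentHashesRequest(
        module_hash_version="IdentifierHashV2",
        module_entities={"example-module-id": module_entity},
    )
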
class BatchGetComponentRequest(msrest.serialization.Model):
"""BatchGetComponentRequest.
:ivar version_ids:
:vartype version_ids: list[str]
:ivar name_and_versions:
:vartype name_and_versions: list[~flow.models.ComponentNameMetaInfo]
"""
_attribute_map = {
'version_ids': {'key': 'versionIds', 'type': '[str]'},
'name_and_versions': {'key': 'nameAndVersions', 'type': '[ComponentNameMetaInfo]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword version_ids:
:paramtype version_ids: list[str]
:keyword name_and_versions:
:paramtype name_and_versions: list[~flow.models.ComponentNameMetaInfo]
"""
super(BatchGetComponentRequest, self).__init__(**kwargs)
self.version_ids = kwargs.get('version_ids', None)
self.name_and_versions = kwargs.get('name_and_versions', None)
class Binding(msrest.serialization.Model):
"""Binding.
:ivar binding_type: The only acceptable values to pass in are None and "Basic". The default
value is None.
:vartype binding_type: str
"""
_attribute_map = {
'binding_type': {'key': 'bindingType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword binding_type: The only acceptable values to pass in are None and "Basic". The default
value is None.
:paramtype binding_type: str
"""
super(Binding, self).__init__(**kwargs)
self.binding_type = kwargs.get('binding_type', None)
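
# Illustrative only (not generated): per the docstring, the service accepts
# only None or "Basic" for binding_type, so a non-default Binding looks like
# this.
def _example_basic_binding():
    return Binding(binding_type="Basic")
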
class BulkTestDto(msrest.serialization.Model):
"""BulkTestDto.
:ivar bulk_test_id:
:vartype bulk_test_id: str
:ivar display_name:
:vartype display_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar runtime:
:vartype runtime: str
:ivar created_by:
:vartype created_by: ~flow.models.SchemaContractsCreatedBy
:ivar created_on:
:vartype created_on: ~datetime.datetime
:ivar evaluation_count:
:vartype evaluation_count: int
:ivar variant_count:
:vartype variant_count: int
:ivar flow_submit_run_settings:
:vartype flow_submit_run_settings: ~flow.models.FlowSubmitRunSettings
:ivar inputs: This is a dictionary.
:vartype inputs: dict[str, ~flow.models.FlowInputDefinition]
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, ~flow.models.FlowOutputDefinition]
:ivar batch_inputs:
:vartype batch_inputs: list[dict[str, any]]
:ivar batch_data_input:
:vartype batch_data_input: ~flow.models.BatchDataInput
"""
_attribute_map = {
'bulk_test_id': {'key': 'bulkTestId', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'runtime': {'key': 'runtime', 'type': 'str'},
'created_by': {'key': 'createdBy', 'type': 'SchemaContractsCreatedBy'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'evaluation_count': {'key': 'evaluationCount', 'type': 'int'},
'variant_count': {'key': 'variantCount', 'type': 'int'},
'flow_submit_run_settings': {'key': 'flowSubmitRunSettings', 'type': 'FlowSubmitRunSettings'},
'inputs': {'key': 'inputs', 'type': '{FlowInputDefinition}'},
'outputs': {'key': 'outputs', 'type': '{FlowOutputDefinition}'},
'batch_inputs': {'key': 'batch_inputs', 'type': '[{object}]'},
'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
}
def __init__(
self,
**kwargs
):
"""
:keyword bulk_test_id:
:paramtype bulk_test_id: str
:keyword display_name:
:paramtype display_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword runtime:
:paramtype runtime: str
:keyword created_by:
:paramtype created_by: ~flow.models.SchemaContractsCreatedBy
:keyword created_on:
:paramtype created_on: ~datetime.datetime
:keyword evaluation_count:
:paramtype evaluation_count: int
:keyword variant_count:
:paramtype variant_count: int
:keyword flow_submit_run_settings:
:paramtype flow_submit_run_settings: ~flow.models.FlowSubmitRunSettings
:keyword inputs: This is a dictionary.
:paramtype inputs: dict[str, ~flow.models.FlowInputDefinition]
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, ~flow.models.FlowOutputDefinition]
:keyword batch_inputs:
:paramtype batch_inputs: list[dict[str, any]]
:keyword batch_data_input:
:paramtype batch_data_input: ~flow.models.BatchDataInput
"""
super(BulkTestDto, self).__init__(**kwargs)
self.bulk_test_id = kwargs.get('bulk_test_id', None)
self.display_name = kwargs.get('display_name', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.runtime = kwargs.get('runtime', None)
self.created_by = kwargs.get('created_by', None)
self.created_on = kwargs.get('created_on', None)
self.evaluation_count = kwargs.get('evaluation_count', None)
self.variant_count = kwargs.get('variant_count', None)
self.flow_submit_run_settings = kwargs.get('flow_submit_run_settings', None)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
self.batch_inputs = kwargs.get('batch_inputs', None)
self.batch_data_input = kwargs.get('batch_data_input', None)
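
# Illustrative only (not generated): deserialize() (inherited from
# msrest.serialization.Model) parses a wire-format payload, mapping the
# camelCase keys from _attribute_map back onto attribute names and parsing
# createdOn as an ISO-8601 datetime. The payload below is hypothetical.
def _example_bulk_test_dto_from_payload():
    payload = {
        "bulkTestId": "bt-0001",
        "displayName": "smoke test",
        "evaluationCount": 2,
        "createdOn": "2023-01-01T00:00:00Z",
    }
    return BulkTestDto.deserialize(payload)
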
class CloudError(msrest.serialization.Model):
"""CloudError.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar code:
:vartype code: str
:ivar message:
:vartype message: str
:ivar target:
:vartype target: str
:ivar details:
:vartype details: list[~flow.models.CloudError]
:ivar additional_info:
:vartype additional_info: list[~flow.models.AdditionalErrorInfo]
"""
_validation = {
'details': {'readonly': True},
'additional_info': {'readonly': True},
}
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'details': {'key': 'details', 'type': '[CloudError]'},
'additional_info': {'key': 'additionalInfo', 'type': '[AdditionalErrorInfo]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword code:
:paramtype code: str
:keyword message:
:paramtype message: str
:keyword target:
:paramtype target: str
"""
super(CloudError, self).__init__(**kwargs)
self.code = kwargs.get('code', None)
self.message = kwargs.get('message', None)
self.target = kwargs.get('target', None)
self.details = None
self.additional_info = None
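
# Illustrative only (not generated): 'details' and 'additional_info' are
# marked readonly in _validation, so __init__ forces them to None and
# serialize() omits them by default; they are populated only when a server
# payload is deserialized.
def _example_cloud_error_readonly_fields():
    err = CloudError(code="UserError", message="Something failed")
    assert err.details is None  # ignored even if passed to __init__
    return err.serialize()      # readonly fields are not sent to the service
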
class CloudPrioritySetting(msrest.serialization.Model):
"""CloudPrioritySetting.
:ivar scope_priority:
:vartype scope_priority: ~flow.models.PriorityConfiguration
:ivar aml_compute_priority:
:vartype aml_compute_priority: ~flow.models.PriorityConfiguration
:ivar itp_priority:
:vartype itp_priority: ~flow.models.PriorityConfiguration
:ivar singularity_priority:
:vartype singularity_priority: ~flow.models.PriorityConfiguration
"""
_attribute_map = {
'scope_priority': {'key': 'scopePriority', 'type': 'PriorityConfiguration'},
'aml_compute_priority': {'key': 'AmlComputePriority', 'type': 'PriorityConfiguration'},
'itp_priority': {'key': 'ItpPriority', 'type': 'PriorityConfiguration'},
'singularity_priority': {'key': 'SingularityPriority', 'type': 'PriorityConfiguration'},
}
def __init__(
self,
**kwargs
):
"""
:keyword scope_priority:
:paramtype scope_priority: ~flow.models.PriorityConfiguration
:keyword aml_compute_priority:
:paramtype aml_compute_priority: ~flow.models.PriorityConfiguration
:keyword itp_priority:
:paramtype itp_priority: ~flow.models.PriorityConfiguration
:keyword singularity_priority:
:paramtype singularity_priority: ~flow.models.PriorityConfiguration
"""
super(CloudPrioritySetting, self).__init__(**kwargs)
self.scope_priority = kwargs.get('scope_priority', None)
self.aml_compute_priority = kwargs.get('aml_compute_priority', None)
self.itp_priority = kwargs.get('itp_priority', None)
self.singularity_priority = kwargs.get('singularity_priority', None)
class CloudSettings(msrest.serialization.Model):
"""CloudSettings.
:ivar linked_settings:
:vartype linked_settings: list[~flow.models.ParameterAssignment]
:ivar priority_config:
:vartype priority_config: ~flow.models.PriorityConfiguration
:ivar hdi_run_config:
:vartype hdi_run_config: ~flow.models.HdiRunConfiguration
:ivar sub_graph_config:
:vartype sub_graph_config: ~flow.models.SubGraphConfiguration
:ivar auto_ml_component_config:
:vartype auto_ml_component_config: ~flow.models.AutoMLComponentConfiguration
:ivar ap_cloud_config:
:vartype ap_cloud_config: ~flow.models.APCloudConfiguration
:ivar scope_cloud_config:
:vartype scope_cloud_config: ~flow.models.ScopeCloudConfiguration
:ivar es_cloud_config:
:vartype es_cloud_config: ~flow.models.EsCloudConfiguration
:ivar data_transfer_cloud_config:
:vartype data_transfer_cloud_config: ~flow.models.DataTransferCloudConfiguration
:ivar aml_spark_cloud_setting:
:vartype aml_spark_cloud_setting: ~flow.models.AmlSparkCloudSetting
:ivar data_transfer_v2_cloud_setting:
:vartype data_transfer_v2_cloud_setting: ~flow.models.DataTransferV2CloudSetting
"""
_attribute_map = {
'linked_settings': {'key': 'linkedSettings', 'type': '[ParameterAssignment]'},
'priority_config': {'key': 'priorityConfig', 'type': 'PriorityConfiguration'},
'hdi_run_config': {'key': 'hdiRunConfig', 'type': 'HdiRunConfiguration'},
'sub_graph_config': {'key': 'subGraphConfig', 'type': 'SubGraphConfiguration'},
'auto_ml_component_config': {'key': 'autoMLComponentConfig', 'type': 'AutoMLComponentConfiguration'},
'ap_cloud_config': {'key': 'apCloudConfig', 'type': 'APCloudConfiguration'},
'scope_cloud_config': {'key': 'scopeCloudConfig', 'type': 'ScopeCloudConfiguration'},
'es_cloud_config': {'key': 'esCloudConfig', 'type': 'EsCloudConfiguration'},
'data_transfer_cloud_config': {'key': 'dataTransferCloudConfig', 'type': 'DataTransferCloudConfiguration'},
'aml_spark_cloud_setting': {'key': 'amlSparkCloudSetting', 'type': 'AmlSparkCloudSetting'},
'data_transfer_v2_cloud_setting': {'key': 'dataTransferV2CloudSetting', 'type': 'DataTransferV2CloudSetting'},
}
def __init__(
self,
**kwargs
):
"""
:keyword linked_settings:
:paramtype linked_settings: list[~flow.models.ParameterAssignment]
:keyword priority_config:
:paramtype priority_config: ~flow.models.PriorityConfiguration
:keyword hdi_run_config:
:paramtype hdi_run_config: ~flow.models.HdiRunConfiguration
:keyword sub_graph_config:
:paramtype sub_graph_config: ~flow.models.SubGraphConfiguration
:keyword auto_ml_component_config:
:paramtype auto_ml_component_config: ~flow.models.AutoMLComponentConfiguration
:keyword ap_cloud_config:
:paramtype ap_cloud_config: ~flow.models.APCloudConfiguration
:keyword scope_cloud_config:
:paramtype scope_cloud_config: ~flow.models.ScopeCloudConfiguration
:keyword es_cloud_config:
:paramtype es_cloud_config: ~flow.models.EsCloudConfiguration
:keyword data_transfer_cloud_config:
:paramtype data_transfer_cloud_config: ~flow.models.DataTransferCloudConfiguration
:keyword aml_spark_cloud_setting:
:paramtype aml_spark_cloud_setting: ~flow.models.AmlSparkCloudSetting
:keyword data_transfer_v2_cloud_setting:
:paramtype data_transfer_v2_cloud_setting: ~flow.models.DataTransferV2CloudSetting
"""
super(CloudSettings, self).__init__(**kwargs)
self.linked_settings = kwargs.get('linked_settings', None)
self.priority_config = kwargs.get('priority_config', None)
self.hdi_run_config = kwargs.get('hdi_run_config', None)
self.sub_graph_config = kwargs.get('sub_graph_config', None)
self.auto_ml_component_config = kwargs.get('auto_ml_component_config', None)
self.ap_cloud_config = kwargs.get('ap_cloud_config', None)
self.scope_cloud_config = kwargs.get('scope_cloud_config', None)
self.es_cloud_config = kwargs.get('es_cloud_config', None)
self.data_transfer_cloud_config = kwargs.get('data_transfer_cloud_config', None)
self.aml_spark_cloud_setting = kwargs.get('aml_spark_cloud_setting', None)
self.data_transfer_v2_cloud_setting = kwargs.get('data_transfer_v2_cloud_setting', None)
class ColumnTransformer(msrest.serialization.Model):
"""ColumnTransformer.
:ivar fields:
:vartype fields: list[str]
:ivar parameters: Anything.
:vartype parameters: any
"""
_attribute_map = {
'fields': {'key': 'fields', 'type': '[str]'},
'parameters': {'key': 'parameters', 'type': 'object'},
}
def __init__(
self,
**kwargs
):
"""
:keyword fields:
:paramtype fields: list[str]
:keyword parameters: Anything.
:paramtype parameters: any
"""
super(ColumnTransformer, self).__init__(**kwargs)
self.fields = kwargs.get('fields', None)
self.parameters = kwargs.get('parameters', None)
class CommandJob(msrest.serialization.Model):
"""CommandJob.
:ivar job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data",
"AutoML", "Spark", "Base".
:vartype job_type: str or ~flow.models.JobType
:ivar code_id:
:vartype code_id: str
:ivar command:
:vartype command: str
:ivar environment_id:
:vartype environment_id: str
:ivar input_data_bindings: Dictionary of :code:`<InputDataBinding>`.
:vartype input_data_bindings: dict[str, ~flow.models.InputDataBinding]
:ivar output_data_bindings: Dictionary of :code:`<OutputDataBinding>`.
:vartype output_data_bindings: dict[str, ~flow.models.OutputDataBinding]
:ivar distribution:
:vartype distribution: ~flow.models.DistributionConfiguration
:ivar environment_variables: Dictionary of :code:`<string>`.
:vartype environment_variables: dict[str, str]
:ivar parameters: Dictionary of :code:`<any>`.
:vartype parameters: dict[str, any]
:ivar autologger_settings:
:vartype autologger_settings: ~flow.models.MfeInternalAutologgerSettings
:ivar limits:
:vartype limits: ~flow.models.CommandJobLimits
:ivar provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
"InProgress".
:vartype provisioning_state: str or ~flow.models.JobProvisioningState
:ivar parent_job_name:
:vartype parent_job_name: str
:ivar display_name:
:vartype display_name: str
:ivar experiment_name:
:vartype experiment_name: str
:ivar status: Possible values include: "NotStarted", "Starting", "Provisioning", "Preparing",
"Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled",
"NotResponding", "Paused", "Unknown", "Scheduled".
:vartype status: str or ~flow.models.JobStatus
:ivar interaction_endpoints: Dictionary of :code:`<JobEndpoint>`.
:vartype interaction_endpoints: dict[str, ~flow.models.JobEndpoint]
:ivar identity:
:vartype identity: ~flow.models.MfeInternalIdentityConfiguration
:ivar compute:
:vartype compute: ~flow.models.ComputeConfiguration
:ivar priority:
:vartype priority: int
:ivar output:
:vartype output: ~flow.models.JobOutputArtifacts
:ivar is_archived:
:vartype is_archived: bool
:ivar schedule:
:vartype schedule: ~flow.models.ScheduleBase
:ivar component_id:
:vartype component_id: str
:ivar notification_setting:
:vartype notification_setting: ~flow.models.NotificationSetting
:ivar secrets_configuration: Dictionary of :code:`<MfeInternalSecretConfiguration>`.
:vartype secrets_configuration: dict[str, ~flow.models.MfeInternalSecretConfiguration]
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
"""
_validation = {
'command': {'min_length': 1},
}
_attribute_map = {
'job_type': {'key': 'jobType', 'type': 'str'},
'code_id': {'key': 'codeId', 'type': 'str'},
'command': {'key': 'command', 'type': 'str'},
'environment_id': {'key': 'environmentId', 'type': 'str'},
'input_data_bindings': {'key': 'inputDataBindings', 'type': '{InputDataBinding}'},
'output_data_bindings': {'key': 'outputDataBindings', 'type': '{OutputDataBinding}'},
'distribution': {'key': 'distribution', 'type': 'DistributionConfiguration'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'parameters': {'key': 'parameters', 'type': '{object}'},
'autologger_settings': {'key': 'autologgerSettings', 'type': 'MfeInternalAutologgerSettings'},
'limits': {'key': 'limits', 'type': 'CommandJobLimits'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'parent_job_name': {'key': 'parentJobName', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'},
'identity': {'key': 'identity', 'type': 'MfeInternalIdentityConfiguration'},
'compute': {'key': 'compute', 'type': 'ComputeConfiguration'},
'priority': {'key': 'priority', 'type': 'int'},
'output': {'key': 'output', 'type': 'JobOutputArtifacts'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
'schedule': {'key': 'schedule', 'type': 'ScheduleBase'},
'component_id': {'key': 'componentId', 'type': 'str'},
'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'},
'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{MfeInternalSecretConfiguration}'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data",
"AutoML", "Spark", "Base".
:paramtype job_type: str or ~flow.models.JobType
:keyword code_id:
:paramtype code_id: str
:keyword command:
:paramtype command: str
:keyword environment_id:
:paramtype environment_id: str
:keyword input_data_bindings: Dictionary of :code:`<InputDataBinding>`.
:paramtype input_data_bindings: dict[str, ~flow.models.InputDataBinding]
:keyword output_data_bindings: Dictionary of :code:`<OutputDataBinding>`.
:paramtype output_data_bindings: dict[str, ~flow.models.OutputDataBinding]
:keyword distribution:
:paramtype distribution: ~flow.models.DistributionConfiguration
:keyword environment_variables: Dictionary of :code:`<string>`.
:paramtype environment_variables: dict[str, str]
:keyword parameters: Dictionary of :code:`<any>`.
:paramtype parameters: dict[str, any]
:keyword autologger_settings:
:paramtype autologger_settings: ~flow.models.MfeInternalAutologgerSettings
:keyword limits:
:paramtype limits: ~flow.models.CommandJobLimits
:keyword provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
"InProgress".
:paramtype provisioning_state: str or ~flow.models.JobProvisioningState
:keyword parent_job_name:
:paramtype parent_job_name: str
:keyword display_name:
:paramtype display_name: str
:keyword experiment_name:
:paramtype experiment_name: str
:keyword status: Possible values include: "NotStarted", "Starting", "Provisioning",
"Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed",
"Canceled", "NotResponding", "Paused", "Unknown", "Scheduled".
:paramtype status: str or ~flow.models.JobStatus
:keyword interaction_endpoints: Dictionary of :code:`<JobEndpoint>`.
:paramtype interaction_endpoints: dict[str, ~flow.models.JobEndpoint]
:keyword identity:
:paramtype identity: ~flow.models.MfeInternalIdentityConfiguration
:keyword compute:
:paramtype compute: ~flow.models.ComputeConfiguration
:keyword priority:
:paramtype priority: int
:keyword output:
:paramtype output: ~flow.models.JobOutputArtifacts
:keyword is_archived:
:paramtype is_archived: bool
:keyword schedule:
:paramtype schedule: ~flow.models.ScheduleBase
:keyword component_id:
:paramtype component_id: str
:keyword notification_setting:
:paramtype notification_setting: ~flow.models.NotificationSetting
:keyword secrets_configuration: Dictionary of :code:`<MfeInternalSecretConfiguration>`.
:paramtype secrets_configuration: dict[str, ~flow.models.MfeInternalSecretConfiguration]
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
"""
super(CommandJob, self).__init__(**kwargs)
self.job_type = kwargs.get('job_type', None)
self.code_id = kwargs.get('code_id', None)
self.command = kwargs.get('command', None)
self.environment_id = kwargs.get('environment_id', None)
self.input_data_bindings = kwargs.get('input_data_bindings', None)
self.output_data_bindings = kwargs.get('output_data_bindings', None)
self.distribution = kwargs.get('distribution', None)
self.environment_variables = kwargs.get('environment_variables', None)
self.parameters = kwargs.get('parameters', None)
self.autologger_settings = kwargs.get('autologger_settings', None)
self.limits = kwargs.get('limits', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
self.parent_job_name = kwargs.get('parent_job_name', None)
self.display_name = kwargs.get('display_name', None)
self.experiment_name = kwargs.get('experiment_name', None)
self.status = kwargs.get('status', None)
self.interaction_endpoints = kwargs.get('interaction_endpoints', None)
self.identity = kwargs.get('identity', None)
self.compute = kwargs.get('compute', None)
self.priority = kwargs.get('priority', None)
self.output = kwargs.get('output', None)
self.is_archived = kwargs.get('is_archived', None)
self.schedule = kwargs.get('schedule', None)
self.component_id = kwargs.get('component_id', None)
self.notification_setting = kwargs.get('notification_setting', None)
self.secrets_configuration = kwargs.get('secrets_configuration', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
class CommandJobLimits(msrest.serialization.Model):
"""CommandJobLimits.
:ivar job_limits_type: Possible values include: "Command", "Sweep".
:vartype job_limits_type: str or ~flow.models.JobLimitsType
:ivar timeout:
:vartype timeout: str
"""
_attribute_map = {
'job_limits_type': {'key': 'jobLimitsType', 'type': 'str'},
'timeout': {'key': 'timeout', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword job_limits_type: Possible values include: "Command", "Sweep".
:paramtype job_limits_type: str or ~flow.models.JobLimitsType
:keyword timeout:
:paramtype timeout: str
"""
super(CommandJobLimits, self).__init__(**kwargs)
self.job_limits_type = kwargs.get('job_limits_type', None)
self.timeout = kwargs.get('timeout', None)
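
# Illustrative only (not generated): timeout is typed as a plain string in
# this contract. Azure job limits conventionally use ISO-8601 durations
# (e.g. "PT1H" for one hour), but that format is an assumption on our part,
# not something this model validates.
def _example_command_job_limits():
    return CommandJobLimits(job_limits_type="Command", timeout="PT1H")
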
class CommandReturnCodeConfig(msrest.serialization.Model):
"""CommandReturnCodeConfig.
:ivar return_code: Possible values include: "Zero", "ZeroOrGreater".
:vartype return_code: str or ~flow.models.SuccessfulCommandReturnCode
:ivar successful_return_codes:
:vartype successful_return_codes: list[int]
"""
_attribute_map = {
'return_code': {'key': 'returnCode', 'type': 'str'},
'successful_return_codes': {'key': 'successfulReturnCodes', 'type': '[int]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword return_code: Possible values include: "Zero", "ZeroOrGreater".
:paramtype return_code: str or ~flow.models.SuccessfulCommandReturnCode
:keyword successful_return_codes:
:paramtype successful_return_codes: list[int]
"""
super(CommandReturnCodeConfig, self).__init__(**kwargs)
self.return_code = kwargs.get('return_code', None)
self.successful_return_codes = kwargs.get('successful_return_codes', None)
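
# Illustrative only (not generated): return_code selects one of the
# documented policies ("Zero" or "ZeroOrGreater"), while
# successful_return_codes can enumerate explicit exit codes; treating 0 and 3
# as success here is hypothetical.
def _example_command_return_code_config():
    return CommandReturnCodeConfig(
        return_code="Zero",
        successful_return_codes=[0, 3],
    )
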
class ComponentConfiguration(msrest.serialization.Model):
"""ComponentConfiguration.
:ivar component_identifier:
:vartype component_identifier: str
"""
_attribute_map = {
'component_identifier': {'key': 'componentIdentifier', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword component_identifier:
:paramtype component_identifier: str
"""
super(ComponentConfiguration, self).__init__(**kwargs)
self.component_identifier = kwargs.get('component_identifier', None)
class ComponentInput(msrest.serialization.Model):
"""ComponentInput.
:ivar name:
:vartype name: str
:ivar optional:
:vartype optional: bool
:ivar description:
:vartype description: str
:ivar type:
:vartype type: str
:ivar default:
:vartype default: str
:ivar enum:
:vartype enum: list[str]
:ivar min:
:vartype min: str
:ivar max:
:vartype max: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'optional': {'key': 'optional', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'default': {'key': 'default', 'type': 'str'},
'enum': {'key': 'enum', 'type': '[str]'},
'min': {'key': 'min', 'type': 'str'},
'max': {'key': 'max', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword optional:
:paramtype optional: bool
:keyword description:
:paramtype description: str
:keyword type:
:paramtype type: str
:keyword default:
:paramtype default: str
:keyword enum:
:paramtype enum: list[str]
:keyword min:
:paramtype min: str
:keyword max:
:paramtype max: str
"""
super(ComponentInput, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.optional = kwargs.get('optional', None)
self.description = kwargs.get('description', None)
self.type = kwargs.get('type', None)
self.default = kwargs.get('default', None)
self.enum = kwargs.get('enum', None)
self.min = kwargs.get('min', None)
self.max = kwargs.get('max', None)
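
# Illustrative only (not generated): note that default, min, and max are all
# typed as strings in this contract, even for numeric parameters, so bounds
# are expressed as string literals. The parameter shown is hypothetical.
def _example_component_input():
    return ComponentInput(
        name="learning_rate",
        type="Float",
        optional=True,
        default="0.01",
        min="0.0",
        max="1.0",
    )
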
class ComponentJob(msrest.serialization.Model):
"""ComponentJob.
:ivar compute:
:vartype compute: ~flow.models.ComputeConfiguration
:ivar component_id:
:vartype component_id: str
:ivar inputs: This is a dictionary.
:vartype inputs: dict[str, ~flow.models.ComponentJobInput]
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, ~flow.models.ComponentJobOutput]
"""
_attribute_map = {
'compute': {'key': 'compute', 'type': 'ComputeConfiguration'},
'component_id': {'key': 'componentId', 'type': 'str'},
'inputs': {'key': 'inputs', 'type': '{ComponentJobInput}'},
'outputs': {'key': 'outputs', 'type': '{ComponentJobOutput}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword compute:
:paramtype compute: ~flow.models.ComputeConfiguration
:keyword component_id:
:paramtype component_id: str
:keyword inputs: This is a dictionary.
:paramtype inputs: dict[str, ~flow.models.ComponentJobInput]
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, ~flow.models.ComponentJobOutput]
"""
super(ComponentJob, self).__init__(**kwargs)
self.compute = kwargs.get('compute', None)
self.component_id = kwargs.get('component_id', None)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
class ComponentJobInput(msrest.serialization.Model):
"""ComponentJobInput.
:ivar data:
:vartype data: ~flow.models.InputData
:ivar input_binding:
:vartype input_binding: str
"""
_attribute_map = {
'data': {'key': 'data', 'type': 'InputData'},
'input_binding': {'key': 'inputBinding', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data:
:paramtype data: ~flow.models.InputData
:keyword input_binding:
:paramtype input_binding: str
"""
super(ComponentJobInput, self).__init__(**kwargs)
self.data = kwargs.get('data', None)
self.input_binding = kwargs.get('input_binding', None)
class ComponentJobOutput(msrest.serialization.Model):
"""ComponentJobOutput.
:ivar data:
:vartype data: ~flow.models.MfeInternalOutputData
:ivar output_binding:
:vartype output_binding: str
"""
_attribute_map = {
'data': {'key': 'data', 'type': 'MfeInternalOutputData'},
'output_binding': {'key': 'outputBinding', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data:
:paramtype data: ~flow.models.MfeInternalOutputData
:keyword output_binding:
:paramtype output_binding: str
"""
super(ComponentJobOutput, self).__init__(**kwargs)
self.data = kwargs.get('data', None)
self.output_binding = kwargs.get('output_binding', None)
class ComponentNameAndDefaultVersion(msrest.serialization.Model):
"""ComponentNameAndDefaultVersion.
:ivar component_name:
:vartype component_name: str
:ivar version:
:vartype version: str
:ivar feed_name:
:vartype feed_name: str
:ivar registry_name:
:vartype registry_name: str
"""
_attribute_map = {
'component_name': {'key': 'componentName', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'feed_name': {'key': 'feedName', 'type': 'str'},
'registry_name': {'key': 'registryName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword component_name:
:paramtype component_name: str
:keyword version:
:paramtype version: str
:keyword feed_name:
:paramtype feed_name: str
:keyword registry_name:
:paramtype registry_name: str
"""
super(ComponentNameAndDefaultVersion, self).__init__(**kwargs)
self.component_name = kwargs.get('component_name', None)
self.version = kwargs.get('version', None)
self.feed_name = kwargs.get('feed_name', None)
self.registry_name = kwargs.get('registry_name', None)
class ComponentNameMetaInfo(msrest.serialization.Model):
"""ComponentNameMetaInfo.
:ivar feed_name:
:vartype feed_name: str
:ivar component_name:
:vartype component_name: str
:ivar component_version:
:vartype component_version: str
:ivar registry_name:
:vartype registry_name: str
"""
_attribute_map = {
'feed_name': {'key': 'feedName', 'type': 'str'},
'component_name': {'key': 'componentName', 'type': 'str'},
'component_version': {'key': 'componentVersion', 'type': 'str'},
'registry_name': {'key': 'registryName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword feed_name:
:paramtype feed_name: str
:keyword component_name:
:paramtype component_name: str
:keyword component_version:
:paramtype component_version: str
:keyword registry_name:
:paramtype registry_name: str
"""
super(ComponentNameMetaInfo, self).__init__(**kwargs)
self.feed_name = kwargs.get('feed_name', None)
self.component_name = kwargs.get('component_name', None)
self.component_version = kwargs.get('component_version', None)
self.registry_name = kwargs.get('registry_name', None)
class ComponentOutput(msrest.serialization.Model):
"""ComponentOutput.
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar type:
:vartype type: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword type:
:paramtype type: str
"""
super(ComponentOutput, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.type = kwargs.get('type', None)
class ComponentPreflightResult(msrest.serialization.Model):
"""ComponentPreflightResult.
:ivar error_details:
:vartype error_details: list[~flow.models.RootError]
"""
_attribute_map = {
'error_details': {'key': 'errorDetails', 'type': '[RootError]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword error_details:
:paramtype error_details: list[~flow.models.RootError]
"""
super(ComponentPreflightResult, self).__init__(**kwargs)
self.error_details = kwargs.get('error_details', None)
class ComponentSpecMetaInfo(msrest.serialization.Model):
"""ComponentSpecMetaInfo.
:ivar component_spec: Anything.
:vartype component_spec: any
:ivar component_version:
:vartype component_version: str
:ivar is_anonymous:
:vartype is_anonymous: bool
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar component_name:
:vartype component_name: str
:ivar description:
:vartype description: str
:ivar is_archived:
:vartype is_archived: bool
"""
_attribute_map = {
'component_spec': {'key': 'componentSpec', 'type': 'object'},
'component_version': {'key': 'componentVersion', 'type': 'str'},
'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
'properties': {'key': 'properties', 'type': '{str}'},
'tags': {'key': 'tags', 'type': '{str}'},
'component_name': {'key': 'componentName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword component_spec: Anything.
:paramtype component_spec: any
:keyword component_version:
:paramtype component_version: str
:keyword is_anonymous:
:paramtype is_anonymous: bool
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword component_name:
:paramtype component_name: str
:keyword description:
:paramtype description: str
:keyword is_archived:
:paramtype is_archived: bool
"""
super(ComponentSpecMetaInfo, self).__init__(**kwargs)
self.component_spec = kwargs.get('component_spec', None)
self.component_version = kwargs.get('component_version', None)
self.is_anonymous = kwargs.get('is_anonymous', None)
self.properties = kwargs.get('properties', None)
self.tags = kwargs.get('tags', None)
self.component_name = kwargs.get('component_name', None)
self.description = kwargs.get('description', None)
self.is_archived = kwargs.get('is_archived', None)
class ComponentUpdateRequest(msrest.serialization.Model):
"""ComponentUpdateRequest.
:ivar original_module_entity:
:vartype original_module_entity: ~flow.models.ModuleEntity
:ivar update_module_entity:
:vartype update_module_entity: ~flow.models.ModuleEntity
:ivar module_name:
:vartype module_name: str
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar overwrite_with_original_name_and_version:
:vartype overwrite_with_original_name_and_version: bool
:ivar snapshot_id:
:vartype snapshot_id: str
"""
_attribute_map = {
'original_module_entity': {'key': 'originalModuleEntity', 'type': 'ModuleEntity'},
'update_module_entity': {'key': 'updateModuleEntity', 'type': 'ModuleEntity'},
'module_name': {'key': 'moduleName', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'overwrite_with_original_name_and_version': {'key': 'overwriteWithOriginalNameAndVersion', 'type': 'bool'},
'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword original_module_entity:
:paramtype original_module_entity: ~flow.models.ModuleEntity
:keyword update_module_entity:
:paramtype update_module_entity: ~flow.models.ModuleEntity
:keyword module_name:
:paramtype module_name: str
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword overwrite_with_original_name_and_version:
:paramtype overwrite_with_original_name_and_version: bool
:keyword snapshot_id:
:paramtype snapshot_id: str
"""
super(ComponentUpdateRequest, self).__init__(**kwargs)
self.original_module_entity = kwargs.get('original_module_entity', None)
self.update_module_entity = kwargs.get('update_module_entity', None)
self.module_name = kwargs.get('module_name', None)
self.properties = kwargs.get('properties', None)
self.overwrite_with_original_name_and_version = kwargs.get('overwrite_with_original_name_and_version', None)
self.snapshot_id = kwargs.get('snapshot_id', None)
class ComponentValidationRequest(msrest.serialization.Model):
"""ComponentValidationRequest.
:ivar component_identifier:
:vartype component_identifier: str
:ivar compute_identity:
:vartype compute_identity: ~flow.models.ComputeIdentityDto
:ivar execution_context_dto:
:vartype execution_context_dto: ~flow.models.ExecutionContextDto
:ivar environment_definition:
:vartype environment_definition: ~flow.models.EnvironmentDefinitionDto
:ivar data_port_dtos:
:vartype data_port_dtos: list[~flow.models.DataPortDto]
"""
_attribute_map = {
'component_identifier': {'key': 'componentIdentifier', 'type': 'str'},
'compute_identity': {'key': 'computeIdentity', 'type': 'ComputeIdentityDto'},
'execution_context_dto': {'key': 'executionContextDto', 'type': 'ExecutionContextDto'},
'environment_definition': {'key': 'environmentDefinition', 'type': 'EnvironmentDefinitionDto'},
'data_port_dtos': {'key': 'dataPortDtos', 'type': '[DataPortDto]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword component_identifier:
:paramtype component_identifier: str
:keyword compute_identity:
:paramtype compute_identity: ~flow.models.ComputeIdentityDto
:keyword execution_context_dto:
:paramtype execution_context_dto: ~flow.models.ExecutionContextDto
:keyword environment_definition:
:paramtype environment_definition: ~flow.models.EnvironmentDefinitionDto
:keyword data_port_dtos:
:paramtype data_port_dtos: list[~flow.models.DataPortDto]
"""
super(ComponentValidationRequest, self).__init__(**kwargs)
self.component_identifier = kwargs.get('component_identifier', None)
self.compute_identity = kwargs.get('compute_identity', None)
self.execution_context_dto = kwargs.get('execution_context_dto', None)
self.environment_definition = kwargs.get('environment_definition', None)
self.data_port_dtos = kwargs.get('data_port_dtos', None)
class ComponentValidationResponse(msrest.serialization.Model):
"""ComponentValidationResponse.
:ivar status: Possible values include: "Succeeded", "Failed".
:vartype status: str or ~flow.models.ValidationStatus
:ivar error: The error response.
:vartype error: ~flow.models.ErrorResponse
"""
_attribute_map = {
'status': {'key': 'status', 'type': 'str'},
'error': {'key': 'error', 'type': 'ErrorResponse'},
}
def __init__(
self,
**kwargs
):
"""
:keyword status: Possible values include: "Succeeded", "Failed".
:paramtype status: str or ~flow.models.ValidationStatus
:keyword error: The error response.
:paramtype error: ~flow.models.ErrorResponse
"""
super(ComponentValidationResponse, self).__init__(**kwargs)
self.status = kwargs.get('status', None)
self.error = kwargs.get('error', None)
class Compute(msrest.serialization.Model):
"""Compute.
:ivar target:
:vartype target: str
:ivar target_type:
:vartype target_type: str
:ivar vm_size:
:vartype vm_size: str
:ivar instance_type:
:vartype instance_type: str
:ivar instance_count:
:vartype instance_count: int
:ivar gpu_count:
:vartype gpu_count: int
:ivar priority:
:vartype priority: str
:ivar region:
:vartype region: str
:ivar arm_id:
:vartype arm_id: str
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
"""
_attribute_map = {
'target': {'key': 'target', 'type': 'str'},
'target_type': {'key': 'targetType', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'instance_type': {'key': 'instanceType', 'type': 'str'},
'instance_count': {'key': 'instanceCount', 'type': 'int'},
'gpu_count': {'key': 'gpuCount', 'type': 'int'},
'priority': {'key': 'priority', 'type': 'str'},
'region': {'key': 'region', 'type': 'str'},
'arm_id': {'key': 'armId', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword target:
:paramtype target: str
:keyword target_type:
:paramtype target_type: str
:keyword vm_size:
:paramtype vm_size: str
:keyword instance_type:
:paramtype instance_type: str
:keyword instance_count:
:paramtype instance_count: int
:keyword gpu_count:
:paramtype gpu_count: int
:keyword priority:
:paramtype priority: str
:keyword region:
:paramtype region: str
:keyword arm_id:
:paramtype arm_id: str
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
"""
super(Compute, self).__init__(**kwargs)
self.target = kwargs.get('target', None)
self.target_type = kwargs.get('target_type', None)
self.vm_size = kwargs.get('vm_size', None)
self.instance_type = kwargs.get('instance_type', None)
self.instance_count = kwargs.get('instance_count', None)
self.gpu_count = kwargs.get('gpu_count', None)
self.priority = kwargs.get('priority', None)
self.region = kwargs.get('region', None)
self.arm_id = kwargs.get('arm_id', None)
self.properties = kwargs.get('properties', None)
class ComputeConfiguration(msrest.serialization.Model):
"""ComputeConfiguration.
:ivar target:
:vartype target: str
:ivar instance_count:
:vartype instance_count: int
:ivar max_instance_count:
:vartype max_instance_count: int
:ivar is_local:
:vartype is_local: bool
:ivar location:
:vartype location: str
:ivar is_clusterless:
:vartype is_clusterless: bool
:ivar instance_type:
:vartype instance_type: str
:ivar instance_priority:
:vartype instance_priority: str
:ivar job_priority:
:vartype job_priority: int
:ivar shm_size:
:vartype shm_size: str
:ivar docker_args:
:vartype docker_args: str
:ivar locations:
:vartype locations: list[str]
:ivar properties: Dictionary of :code:`<any>`.
:vartype properties: dict[str, any]
"""
_attribute_map = {
'target': {'key': 'target', 'type': 'str'},
'instance_count': {'key': 'instanceCount', 'type': 'int'},
'max_instance_count': {'key': 'maxInstanceCount', 'type': 'int'},
'is_local': {'key': 'isLocal', 'type': 'bool'},
'location': {'key': 'location', 'type': 'str'},
'is_clusterless': {'key': 'isClusterless', 'type': 'bool'},
'instance_type': {'key': 'instanceType', 'type': 'str'},
'instance_priority': {'key': 'instancePriority', 'type': 'str'},
'job_priority': {'key': 'jobPriority', 'type': 'int'},
'shm_size': {'key': 'shmSize', 'type': 'str'},
'docker_args': {'key': 'dockerArgs', 'type': 'str'},
'locations': {'key': 'locations', 'type': '[str]'},
'properties': {'key': 'properties', 'type': '{object}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword target:
:paramtype target: str
:keyword instance_count:
:paramtype instance_count: int
:keyword max_instance_count:
:paramtype max_instance_count: int
:keyword is_local:
:paramtype is_local: bool
:keyword location:
:paramtype location: str
:keyword is_clusterless:
:paramtype is_clusterless: bool
:keyword instance_type:
:paramtype instance_type: str
:keyword instance_priority:
:paramtype instance_priority: str
:keyword job_priority:
:paramtype job_priority: int
:keyword shm_size:
:paramtype shm_size: str
:keyword docker_args:
:paramtype docker_args: str
:keyword locations:
:paramtype locations: list[str]
:keyword properties: Dictionary of :code:`<any>`.
:paramtype properties: dict[str, any]
"""
super(ComputeConfiguration, self).__init__(**kwargs)
self.target = kwargs.get('target', None)
self.instance_count = kwargs.get('instance_count', None)
self.max_instance_count = kwargs.get('max_instance_count', None)
self.is_local = kwargs.get('is_local', None)
self.location = kwargs.get('location', None)
self.is_clusterless = kwargs.get('is_clusterless', None)
self.instance_type = kwargs.get('instance_type', None)
self.instance_priority = kwargs.get('instance_priority', None)
self.job_priority = kwargs.get('job_priority', None)
self.shm_size = kwargs.get('shm_size', None)
self.docker_args = kwargs.get('docker_args', None)
self.locations = kwargs.get('locations', None)
self.properties = kwargs.get('properties', None)
class ComputeContract(msrest.serialization.Model):
"""ComputeContract.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id:
:vartype id: str
:ivar name:
:vartype name: str
:ivar type:
:vartype type: str
:ivar location:
:vartype location: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar identity:
:vartype identity: ~flow.models.ComputeIdentityContract
:ivar properties:
:vartype properties: ~flow.models.ComputeProperties
"""
_validation = {
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'identity': {'key': 'identity', 'type': 'ComputeIdentityContract'},
'properties': {'key': 'properties', 'type': 'ComputeProperties'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword name:
:paramtype name: str
:keyword location:
:paramtype location: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword identity:
:paramtype identity: ~flow.models.ComputeIdentityContract
:keyword properties:
:paramtype properties: ~flow.models.ComputeProperties
"""
super(ComputeContract, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.name = kwargs.get('name', None)
self.type = None
self.location = kwargs.get('location', None)
self.tags = kwargs.get('tags', None)
self.identity = kwargs.get('identity', None)
self.properties = kwargs.get('properties', None)
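
# Note on read-only fields (sketch; the payload below is hypothetical).
# ComputeContract marks 'type' readonly in _validation, so the constructor
# discards any value passed for it; the field is only populated when
# deserializing a server response:
#
#     contract = ComputeContract(name='my-compute')
#     assert contract.type is None
#     contract = ComputeContract.deserialize(
#         {'name': 'my-compute', 'type': 'Microsoft.MachineLearningServices/workspaces/computes'})
#     # contract.type is now set from the payload
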
class ComputeIdentityContract(msrest.serialization.Model):
"""ComputeIdentityContract.
:ivar type:
:vartype type: str
:ivar system_identity_url:
:vartype system_identity_url: str
:ivar principal_id:
:vartype principal_id: str
:ivar tenant_id:
:vartype tenant_id: str
:ivar client_id:
:vartype client_id: str
:ivar client_secret_url:
:vartype client_secret_url: str
:ivar user_assigned_identities: This is a dictionary.
:vartype user_assigned_identities: dict[str, ~flow.models.ComputeRPUserAssignedIdentity]
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'system_identity_url': {'key': 'systemIdentityUrl', 'type': 'str'},
'principal_id': {'key': 'principalId', 'type': 'str'},
'tenant_id': {'key': 'tenantId', 'type': 'str'},
'client_id': {'key': 'clientId', 'type': 'str'},
'client_secret_url': {'key': 'clientSecretUrl', 'type': 'str'},
'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{ComputeRPUserAssignedIdentity}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type:
:paramtype type: str
:keyword system_identity_url:
:paramtype system_identity_url: str
:keyword principal_id:
:paramtype principal_id: str
:keyword tenant_id:
:paramtype tenant_id: str
:keyword client_id:
:paramtype client_id: str
:keyword client_secret_url:
:paramtype client_secret_url: str
:keyword user_assigned_identities: This is a dictionary.
:paramtype user_assigned_identities: dict[str, ~flow.models.ComputeRPUserAssignedIdentity]
"""
super(ComputeIdentityContract, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.system_identity_url = kwargs.get('system_identity_url', None)
self.principal_id = kwargs.get('principal_id', None)
self.tenant_id = kwargs.get('tenant_id', None)
self.client_id = kwargs.get('client_id', None)
self.client_secret_url = kwargs.get('client_secret_url', None)
self.user_assigned_identities = kwargs.get('user_assigned_identities', None)
class ComputeIdentityDto(msrest.serialization.Model):
"""ComputeIdentityDto.
:ivar compute_name:
:vartype compute_name: str
:ivar compute_target_type: Possible values include: "Local", "Remote", "HdiCluster",
"ContainerInstance", "AmlCompute", "ComputeInstance", "Cmk8s", "SynapseSpark", "Kubernetes",
"Aisc", "GlobalJobDispatcher", "Databricks", "MockedCompute".
:vartype compute_target_type: str or ~flow.models.ComputeTargetType
:ivar intellectual_property_publisher:
:vartype intellectual_property_publisher: str
"""
_attribute_map = {
'compute_name': {'key': 'computeName', 'type': 'str'},
'compute_target_type': {'key': 'computeTargetType', 'type': 'str'},
'intellectual_property_publisher': {'key': 'intellectualPropertyPublisher', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword compute_name:
:paramtype compute_name: str
:keyword compute_target_type: Possible values include: "Local", "Remote", "HdiCluster",
"ContainerInstance", "AmlCompute", "ComputeInstance", "Cmk8s", "SynapseSpark", "Kubernetes",
"Aisc", "GlobalJobDispatcher", "Databricks", "MockedCompute".
:paramtype compute_target_type: str or ~flow.models.ComputeTargetType
:keyword intellectual_property_publisher:
:paramtype intellectual_property_publisher: str
"""
super(ComputeIdentityDto, self).__init__(**kwargs)
self.compute_name = kwargs.get('compute_name', None)
self.compute_target_type = kwargs.get('compute_target_type', None)
self.intellectual_property_publisher = kwargs.get('intellectual_property_publisher', None)
class ComputeInfo(msrest.serialization.Model):
"""ComputeInfo.
:ivar name:
:vartype name: str
:ivar compute_type: Possible values include: "ACI", "AKS", "AMLCOMPUTE", "IOT", "AKSENDPOINT",
"MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE", "UNKNOWN".
:vartype compute_type: str or ~flow.models.ComputeEnvironmentType
:ivar is_ssl_enabled:
:vartype is_ssl_enabled: bool
:ivar is_gpu_type:
:vartype is_gpu_type: bool
:ivar cluster_purpose:
:vartype cluster_purpose: str
:ivar public_ip_address:
:vartype public_ip_address: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'compute_type': {'key': 'computeType', 'type': 'str'},
'is_ssl_enabled': {'key': 'isSslEnabled', 'type': 'bool'},
'is_gpu_type': {'key': 'isGpuType', 'type': 'bool'},
'cluster_purpose': {'key': 'clusterPurpose', 'type': 'str'},
'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword compute_type: Possible values include: "ACI", "AKS", "AMLCOMPUTE", "IOT",
"AKSENDPOINT", "MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE",
"UNKNOWN".
:paramtype compute_type: str or ~flow.models.ComputeEnvironmentType
:keyword is_ssl_enabled:
:paramtype is_ssl_enabled: bool
:keyword is_gpu_type:
:paramtype is_gpu_type: bool
:keyword cluster_purpose:
:paramtype cluster_purpose: str
:keyword public_ip_address:
:paramtype public_ip_address: str
"""
super(ComputeInfo, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.compute_type = kwargs.get('compute_type', None)
self.is_ssl_enabled = kwargs.get('is_ssl_enabled', None)
self.is_gpu_type = kwargs.get('is_gpu_type', None)
self.cluster_purpose = kwargs.get('cluster_purpose', None)
self.public_ip_address = kwargs.get('public_ip_address', None)
class ComputeProperties(msrest.serialization.Model):
"""ComputeProperties.
All required parameters must be populated in order to send to Azure.
:ivar created_on:
:vartype created_on: ~datetime.datetime
:ivar modified_on:
:vartype modified_on: ~datetime.datetime
:ivar disable_local_auth:
:vartype disable_local_auth: bool
:ivar description:
:vartype description: str
:ivar resource_id:
:vartype resource_id: str
:ivar compute_type: Required.
:vartype compute_type: str
:ivar compute_location:
:vartype compute_location: str
:ivar provisioning_state: Possible values include: "Unknown", "Updating", "Creating",
"Deleting", "Accepted", "Succeeded", "Failed", "Canceled".
:vartype provisioning_state: str or ~flow.models.ProvisioningState
:ivar provisioning_errors:
:vartype provisioning_errors: list[~flow.models.ODataErrorResponse]
:ivar provisioning_warnings: This is a dictionary.
:vartype provisioning_warnings: dict[str, str]
:ivar is_attached_compute:
:vartype is_attached_compute: bool
:ivar properties: Any object.
:vartype properties: any
:ivar status:
:vartype status: ~flow.models.ComputeStatus
:ivar warnings:
:vartype warnings: list[~flow.models.ComputeWarning]
"""
_validation = {
'compute_type': {'required': True, 'min_length': 1},
}
_attribute_map = {
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'resource_id': {'key': 'resourceId', 'type': 'str'},
'compute_type': {'key': 'computeType', 'type': 'str'},
'compute_location': {'key': 'computeLocation', 'type': 'str'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ODataErrorResponse]'},
'provisioning_warnings': {'key': 'provisioningWarnings', 'type': '{str}'},
'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
'properties': {'key': 'properties', 'type': 'object'},
'status': {'key': 'status', 'type': 'ComputeStatus'},
'warnings': {'key': 'warnings', 'type': '[ComputeWarning]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword created_on:
:paramtype created_on: ~datetime.datetime
:keyword modified_on:
:paramtype modified_on: ~datetime.datetime
:keyword disable_local_auth:
:paramtype disable_local_auth: bool
:keyword description:
:paramtype description: str
:keyword resource_id:
:paramtype resource_id: str
:keyword compute_type: Required.
:paramtype compute_type: str
:keyword compute_location:
:paramtype compute_location: str
:keyword provisioning_state: Possible values include: "Unknown", "Updating", "Creating",
"Deleting", "Accepted", "Succeeded", "Failed", "Canceled".
:paramtype provisioning_state: str or ~flow.models.ProvisioningState
:keyword provisioning_errors:
:paramtype provisioning_errors: list[~flow.models.ODataErrorResponse]
:keyword provisioning_warnings: This is a dictionary.
:paramtype provisioning_warnings: dict[str, str]
:keyword is_attached_compute:
:paramtype is_attached_compute: bool
:keyword properties: Any object.
:paramtype properties: any
:keyword status:
:paramtype status: ~flow.models.ComputeStatus
:keyword warnings:
:paramtype warnings: list[~flow.models.ComputeWarning]
"""
super(ComputeProperties, self).__init__(**kwargs)
self.created_on = kwargs.get('created_on', None)
self.modified_on = kwargs.get('modified_on', None)
self.disable_local_auth = kwargs.get('disable_local_auth', None)
self.description = kwargs.get('description', None)
self.resource_id = kwargs.get('resource_id', None)
self.compute_type = kwargs['compute_type']
self.compute_location = kwargs.get('compute_location', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
self.provisioning_errors = kwargs.get('provisioning_errors', None)
self.provisioning_warnings = kwargs.get('provisioning_warnings', None)
self.is_attached_compute = kwargs.get('is_attached_compute', None)
self.properties = kwargs.get('properties', None)
self.status = kwargs.get('status', None)
self.warnings = kwargs.get('warnings', None)
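
# Sketch of the required-parameter behavior (values hypothetical).
# ComputeProperties reads kwargs['compute_type'] rather than kwargs.get(...),
# so omitting it raises KeyError at construction time; _validation additionally
# enforces min_length=1 when the model is validated:
#
#     props = ComputeProperties(compute_type='AmlCompute', disable_local_auth=True)
#     errors = props.validate()  # msrest recursive validation; empty list when valid
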
class ComputeRequest(msrest.serialization.Model):
"""ComputeRequest.
:ivar node_count:
:vartype node_count: int
:ivar gpu_count:
:vartype gpu_count: int
"""
_attribute_map = {
'node_count': {'key': 'nodeCount', 'type': 'int'},
'gpu_count': {'key': 'gpuCount', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword node_count:
:paramtype node_count: int
:keyword gpu_count:
:paramtype gpu_count: int
"""
super(ComputeRequest, self).__init__(**kwargs)
self.node_count = kwargs.get('node_count', None)
self.gpu_count = kwargs.get('gpu_count', None)
class ComputeRPUserAssignedIdentity(msrest.serialization.Model):
"""ComputeRPUserAssignedIdentity.
:ivar principal_id:
:vartype principal_id: str
:ivar tenant_id:
:vartype tenant_id: str
:ivar client_id:
:vartype client_id: str
:ivar client_secret_url:
:vartype client_secret_url: str
:ivar resource_id:
:vartype resource_id: str
"""
_attribute_map = {
'principal_id': {'key': 'principalId', 'type': 'str'},
'tenant_id': {'key': 'tenantId', 'type': 'str'},
'client_id': {'key': 'clientId', 'type': 'str'},
'client_secret_url': {'key': 'clientSecretUrl', 'type': 'str'},
'resource_id': {'key': 'resourceId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword principal_id:
:paramtype principal_id: str
:keyword tenant_id:
:paramtype tenant_id: str
:keyword client_id:
:paramtype client_id: str
:keyword client_secret_url:
:paramtype client_secret_url: str
:keyword resource_id:
:paramtype resource_id: str
"""
super(ComputeRPUserAssignedIdentity, self).__init__(**kwargs)
self.principal_id = kwargs.get('principal_id', None)
self.tenant_id = kwargs.get('tenant_id', None)
self.client_id = kwargs.get('client_id', None)
self.client_secret_url = kwargs.get('client_secret_url', None)
self.resource_id = kwargs.get('resource_id', None)
class ComputeSetting(msrest.serialization.Model):
"""ComputeSetting.
:ivar name:
:vartype name: str
:ivar compute_type: Possible values include: "BatchAi", "MLC", "HdiCluster", "RemoteDocker",
"Databricks", "Aisc".
:vartype compute_type: str or ~flow.models.ComputeType
:ivar batch_ai_compute_info:
:vartype batch_ai_compute_info: ~flow.models.BatchAiComputeInfo
:ivar remote_docker_compute_info:
:vartype remote_docker_compute_info: ~flow.models.RemoteDockerComputeInfo
:ivar hdi_cluster_compute_info:
:vartype hdi_cluster_compute_info: ~flow.models.HdiClusterComputeInfo
:ivar mlc_compute_info:
:vartype mlc_compute_info: ~flow.models.MlcComputeInfo
:ivar databricks_compute_info:
:vartype databricks_compute_info: ~flow.models.DatabricksComputeInfo
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'compute_type': {'key': 'computeType', 'type': 'str'},
'batch_ai_compute_info': {'key': 'batchAiComputeInfo', 'type': 'BatchAiComputeInfo'},
'remote_docker_compute_info': {'key': 'remoteDockerComputeInfo', 'type': 'RemoteDockerComputeInfo'},
'hdi_cluster_compute_info': {'key': 'hdiClusterComputeInfo', 'type': 'HdiClusterComputeInfo'},
'mlc_compute_info': {'key': 'mlcComputeInfo', 'type': 'MlcComputeInfo'},
'databricks_compute_info': {'key': 'databricksComputeInfo', 'type': 'DatabricksComputeInfo'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword compute_type: Possible values include: "BatchAi", "MLC", "HdiCluster", "RemoteDocker",
"Databricks", "Aisc".
:paramtype compute_type: str or ~flow.models.ComputeType
:keyword batch_ai_compute_info:
:paramtype batch_ai_compute_info: ~flow.models.BatchAiComputeInfo
:keyword remote_docker_compute_info:
:paramtype remote_docker_compute_info: ~flow.models.RemoteDockerComputeInfo
:keyword hdi_cluster_compute_info:
:paramtype hdi_cluster_compute_info: ~flow.models.HdiClusterComputeInfo
:keyword mlc_compute_info:
:paramtype mlc_compute_info: ~flow.models.MlcComputeInfo
:keyword databricks_compute_info:
:paramtype databricks_compute_info: ~flow.models.DatabricksComputeInfo
"""
super(ComputeSetting, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.compute_type = kwargs.get('compute_type', None)
self.batch_ai_compute_info = kwargs.get('batch_ai_compute_info', None)
self.remote_docker_compute_info = kwargs.get('remote_docker_compute_info', None)
self.hdi_cluster_compute_info = kwargs.get('hdi_cluster_compute_info', None)
self.mlc_compute_info = kwargs.get('mlc_compute_info', None)
self.databricks_compute_info = kwargs.get('databricks_compute_info', None)
class ComputeStatus(msrest.serialization.Model):
"""ComputeStatus.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar is_status_available:
:vartype is_status_available: bool
:ivar detailed_status: Anything.
:vartype detailed_status: any
    :ivar error: Represents an OData v4 error object.
:vartype error: ~flow.models.ODataError
"""
_validation = {
'is_status_available': {'readonly': True},
}
_attribute_map = {
'is_status_available': {'key': 'isStatusAvailable', 'type': 'bool'},
'detailed_status': {'key': 'detailedStatus', 'type': 'object'},
'error': {'key': 'error', 'type': 'ODataError'},
}
def __init__(
self,
**kwargs
):
"""
:keyword detailed_status: Anything.
:paramtype detailed_status: any
        :keyword error: Represents an OData v4 error object.
:paramtype error: ~flow.models.ODataError
"""
super(ComputeStatus, self).__init__(**kwargs)
self.is_status_available = None
self.detailed_status = kwargs.get('detailed_status', None)
self.error = kwargs.get('error', None)
class ComputeStatusDetail(msrest.serialization.Model):
"""ComputeStatusDetail.
:ivar provisioning_state:
:vartype provisioning_state: str
:ivar provisioning_error_message:
:vartype provisioning_error_message: str
"""
_attribute_map = {
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'provisioning_error_message': {'key': 'provisioningErrorMessage', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword provisioning_state:
:paramtype provisioning_state: str
:keyword provisioning_error_message:
:paramtype provisioning_error_message: str
"""
super(ComputeStatusDetail, self).__init__(**kwargs)
self.provisioning_state = kwargs.get('provisioning_state', None)
self.provisioning_error_message = kwargs.get('provisioning_error_message', None)
class ComputeWarning(msrest.serialization.Model):
"""ComputeWarning.
:ivar title:
:vartype title: str
:ivar message:
:vartype message: str
:ivar code:
:vartype code: str
:ivar severity: Possible values include: "Critical", "Error", "Warning", "Info".
:vartype severity: str or ~flow.models.SeverityLevel
"""
_attribute_map = {
'title': {'key': 'title', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'code': {'key': 'code', 'type': 'str'},
'severity': {'key': 'severity', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword title:
:paramtype title: str
:keyword message:
:paramtype message: str
:keyword code:
:paramtype code: str
:keyword severity: Possible values include: "Critical", "Error", "Warning", "Info".
:paramtype severity: str or ~flow.models.SeverityLevel
"""
super(ComputeWarning, self).__init__(**kwargs)
self.title = kwargs.get('title', None)
self.message = kwargs.get('message', None)
self.code = kwargs.get('code', None)
self.severity = kwargs.get('severity', None)
class ConnectionConfigSpec(msrest.serialization.Model):
"""ConnectionConfigSpec.
:ivar name:
:vartype name: str
:ivar display_name:
:vartype display_name: str
:ivar config_value_type: Possible values include: "String", "Secret".
:vartype config_value_type: str or ~flow.models.ConfigValueType
:ivar description:
:vartype description: str
:ivar default_value:
:vartype default_value: str
:ivar enum_values:
:vartype enum_values: list[str]
:ivar is_optional:
:vartype is_optional: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'config_value_type': {'key': 'configValueType', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'default_value': {'key': 'defaultValue', 'type': 'str'},
'enum_values': {'key': 'enumValues', 'type': '[str]'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword display_name:
:paramtype display_name: str
:keyword config_value_type: Possible values include: "String", "Secret".
:paramtype config_value_type: str or ~flow.models.ConfigValueType
:keyword description:
:paramtype description: str
:keyword default_value:
:paramtype default_value: str
:keyword enum_values:
:paramtype enum_values: list[str]
:keyword is_optional:
:paramtype is_optional: bool
"""
super(ConnectionConfigSpec, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.display_name = kwargs.get('display_name', None)
self.config_value_type = kwargs.get('config_value_type', None)
self.description = kwargs.get('description', None)
self.default_value = kwargs.get('default_value', None)
self.enum_values = kwargs.get('enum_values', None)
self.is_optional = kwargs.get('is_optional', None)
class ConnectionDto(msrest.serialization.Model):
"""ConnectionDto.
:ivar connection_name:
:vartype connection_name: str
:ivar connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:vartype connection_type: str or ~flow.models.ConnectionType
:ivar configs: This is a dictionary.
:vartype configs: dict[str, str]
:ivar custom_configs: This is a dictionary.
:vartype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
:ivar expiry_time:
:vartype expiry_time: ~datetime.datetime
:ivar owner:
:vartype owner: ~flow.models.SchemaContractsCreatedBy
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'connection_name': {'key': 'connectionName', 'type': 'str'},
'connection_type': {'key': 'connectionType', 'type': 'str'},
'configs': {'key': 'configs', 'type': '{str}'},
'custom_configs': {'key': 'customConfigs', 'type': '{CustomConnectionConfig}'},
'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'},
'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword connection_name:
:paramtype connection_name: str
:keyword connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:paramtype connection_type: str or ~flow.models.ConnectionType
:keyword configs: This is a dictionary.
:paramtype configs: dict[str, str]
:keyword custom_configs: This is a dictionary.
:paramtype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
:keyword expiry_time:
:paramtype expiry_time: ~datetime.datetime
:keyword owner:
:paramtype owner: ~flow.models.SchemaContractsCreatedBy
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(ConnectionDto, self).__init__(**kwargs)
self.connection_name = kwargs.get('connection_name', None)
self.connection_type = kwargs.get('connection_type', None)
self.configs = kwargs.get('configs', None)
self.custom_configs = kwargs.get('custom_configs', None)
self.expiry_time = kwargs.get('expiry_time', None)
self.owner = kwargs.get('owner', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
class ConnectionEntity(msrest.serialization.Model):
"""ConnectionEntity.
:ivar connection_id:
:vartype connection_id: str
:ivar connection_name:
:vartype connection_name: str
:ivar connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:vartype connection_type: str or ~flow.models.ConnectionType
:ivar connection_scope: Possible values include: "User", "WorkspaceShared".
:vartype connection_scope: str or ~flow.models.ConnectionScope
:ivar configs: This is a dictionary.
:vartype configs: dict[str, str]
:ivar custom_configs: This is a dictionary.
:vartype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
:ivar expiry_time:
:vartype expiry_time: ~datetime.datetime
:ivar secret_name:
:vartype secret_name: str
:ivar owner:
:vartype owner: ~flow.models.SchemaContractsCreatedBy
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'connection_id': {'key': 'connectionId', 'type': 'str'},
'connection_name': {'key': 'connectionName', 'type': 'str'},
'connection_type': {'key': 'connectionType', 'type': 'str'},
'connection_scope': {'key': 'connectionScope', 'type': 'str'},
'configs': {'key': 'configs', 'type': '{str}'},
'custom_configs': {'key': 'customConfigs', 'type': '{CustomConnectionConfig}'},
'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'},
'secret_name': {'key': 'secretName', 'type': 'str'},
'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword connection_id:
:paramtype connection_id: str
:keyword connection_name:
:paramtype connection_name: str
:keyword connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:paramtype connection_type: str or ~flow.models.ConnectionType
:keyword connection_scope: Possible values include: "User", "WorkspaceShared".
:paramtype connection_scope: str or ~flow.models.ConnectionScope
:keyword configs: This is a dictionary.
:paramtype configs: dict[str, str]
:keyword custom_configs: This is a dictionary.
:paramtype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
:keyword expiry_time:
:paramtype expiry_time: ~datetime.datetime
:keyword secret_name:
:paramtype secret_name: str
:keyword owner:
:paramtype owner: ~flow.models.SchemaContractsCreatedBy
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(ConnectionEntity, self).__init__(**kwargs)
self.connection_id = kwargs.get('connection_id', None)
self.connection_name = kwargs.get('connection_name', None)
self.connection_type = kwargs.get('connection_type', None)
self.connection_scope = kwargs.get('connection_scope', None)
self.configs = kwargs.get('configs', None)
self.custom_configs = kwargs.get('custom_configs', None)
self.expiry_time = kwargs.get('expiry_time', None)
self.secret_name = kwargs.get('secret_name', None)
self.owner = kwargs.get('owner', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
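
# Illustrative deserialization of a service payload (the payload itself is
# hypothetical). Wire keys are mapped back to attribute names and iso-8601
# strings become datetime objects, per _attribute_map:
#
#     entity = ConnectionEntity.deserialize({
#         'connectionName': 'my-aoai',
#         'connectionType': 'AzureOpenAI',
#         'createdDate': '2024-01-01T00:00:00Z',
#     })
#     # entity.created_date is a datetime.datetime
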
class ConnectionOverrideSetting(msrest.serialization.Model):
"""ConnectionOverrideSetting.
:ivar connection_source_type: Possible values include: "Node", "NodeInput".
:vartype connection_source_type: str or ~flow.models.ConnectionSourceType
:ivar node_name:
:vartype node_name: str
:ivar node_input_name:
:vartype node_input_name: str
:ivar node_deployment_name_input:
:vartype node_deployment_name_input: str
:ivar node_model_input:
:vartype node_model_input: str
:ivar connection_name:
:vartype connection_name: str
:ivar deployment_name:
:vartype deployment_name: str
:ivar model:
:vartype model: str
:ivar connection_types:
:vartype connection_types: list[str or ~flow.models.ConnectionType]
:ivar capabilities:
:vartype capabilities: ~flow.models.AzureOpenAIModelCapabilities
:ivar model_enum:
:vartype model_enum: list[str]
"""
_attribute_map = {
'connection_source_type': {'key': 'connectionSourceType', 'type': 'str'},
'node_name': {'key': 'nodeName', 'type': 'str'},
'node_input_name': {'key': 'nodeInputName', 'type': 'str'},
'node_deployment_name_input': {'key': 'nodeDeploymentNameInput', 'type': 'str'},
'node_model_input': {'key': 'nodeModelInput', 'type': 'str'},
'connection_name': {'key': 'connectionName', 'type': 'str'},
'deployment_name': {'key': 'deploymentName', 'type': 'str'},
'model': {'key': 'model', 'type': 'str'},
'connection_types': {'key': 'connectionTypes', 'type': '[str]'},
'capabilities': {'key': 'capabilities', 'type': 'AzureOpenAIModelCapabilities'},
'model_enum': {'key': 'modelEnum', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword connection_source_type: Possible values include: "Node", "NodeInput".
:paramtype connection_source_type: str or ~flow.models.ConnectionSourceType
:keyword node_name:
:paramtype node_name: str
:keyword node_input_name:
:paramtype node_input_name: str
:keyword node_deployment_name_input:
:paramtype node_deployment_name_input: str
:keyword node_model_input:
:paramtype node_model_input: str
:keyword connection_name:
:paramtype connection_name: str
:keyword deployment_name:
:paramtype deployment_name: str
:keyword model:
:paramtype model: str
:keyword connection_types:
:paramtype connection_types: list[str or ~flow.models.ConnectionType]
:keyword capabilities:
:paramtype capabilities: ~flow.models.AzureOpenAIModelCapabilities
:keyword model_enum:
:paramtype model_enum: list[str]
"""
super(ConnectionOverrideSetting, self).__init__(**kwargs)
self.connection_source_type = kwargs.get('connection_source_type', None)
self.node_name = kwargs.get('node_name', None)
self.node_input_name = kwargs.get('node_input_name', None)
self.node_deployment_name_input = kwargs.get('node_deployment_name_input', None)
self.node_model_input = kwargs.get('node_model_input', None)
self.connection_name = kwargs.get('connection_name', None)
self.deployment_name = kwargs.get('deployment_name', None)
self.model = kwargs.get('model', None)
self.connection_types = kwargs.get('connection_types', None)
self.capabilities = kwargs.get('capabilities', None)
self.model_enum = kwargs.get('model_enum', None)
class ConnectionSpec(msrest.serialization.Model):
"""ConnectionSpec.
:ivar connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:vartype connection_type: str or ~flow.models.ConnectionType
:ivar config_specs:
:vartype config_specs: list[~flow.models.ConnectionConfigSpec]
"""
_attribute_map = {
'connection_type': {'key': 'connectionType', 'type': 'str'},
'config_specs': {'key': 'configSpecs', 'type': '[ConnectionConfigSpec]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:paramtype connection_type: str or ~flow.models.ConnectionType
:keyword config_specs:
:paramtype config_specs: list[~flow.models.ConnectionConfigSpec]
"""
super(ConnectionSpec, self).__init__(**kwargs)
self.connection_type = kwargs.get('connection_type', None)
self.config_specs = kwargs.get('config_specs', None)
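
# Illustrative nesting of the two spec models above (names and values are
# hypothetical):
#
#     spec = ConnectionSpec(
#         connection_type='AzureOpenAI',
#         config_specs=[
#             ConnectionConfigSpec(name='api_key', config_value_type='Secret', is_optional=False),
#             ConnectionConfigSpec(name='api_base', config_value_type='String'),
#         ],
#     )
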
class ContainerInstanceConfiguration(msrest.serialization.Model):
"""ContainerInstanceConfiguration.
:ivar region:
:vartype region: str
:ivar cpu_cores:
:vartype cpu_cores: float
:ivar memory_gb:
:vartype memory_gb: float
"""
_attribute_map = {
'region': {'key': 'region', 'type': 'str'},
'cpu_cores': {'key': 'cpuCores', 'type': 'float'},
'memory_gb': {'key': 'memoryGb', 'type': 'float'},
}
def __init__(
self,
**kwargs
):
"""
:keyword region:
:paramtype region: str
:keyword cpu_cores:
:paramtype cpu_cores: float
:keyword memory_gb:
:paramtype memory_gb: float
"""
super(ContainerInstanceConfiguration, self).__init__(**kwargs)
self.region = kwargs.get('region', None)
self.cpu_cores = kwargs.get('cpu_cores', None)
self.memory_gb = kwargs.get('memory_gb', None)
class ContainerRegistry(msrest.serialization.Model):
"""ContainerRegistry.
:ivar address:
:vartype address: str
:ivar username:
:vartype username: str
:ivar password:
:vartype password: str
:ivar credential_type:
:vartype credential_type: str
:ivar registry_identity:
:vartype registry_identity: ~flow.models.RegistryIdentity
"""
_attribute_map = {
'address': {'key': 'address', 'type': 'str'},
'username': {'key': 'username', 'type': 'str'},
'password': {'key': 'password', 'type': 'str'},
'credential_type': {'key': 'credentialType', 'type': 'str'},
'registry_identity': {'key': 'registryIdentity', 'type': 'RegistryIdentity'},
}
def __init__(
self,
**kwargs
):
"""
:keyword address:
:paramtype address: str
:keyword username:
:paramtype username: str
:keyword password:
:paramtype password: str
:keyword credential_type:
:paramtype credential_type: str
:keyword registry_identity:
:paramtype registry_identity: ~flow.models.RegistryIdentity
"""
super(ContainerRegistry, self).__init__(**kwargs)
self.address = kwargs.get('address', None)
self.username = kwargs.get('username', None)
self.password = kwargs.get('password', None)
self.credential_type = kwargs.get('credential_type', None)
self.registry_identity = kwargs.get('registry_identity', None)
class ContainerResourceRequirements(msrest.serialization.Model):
"""ContainerResourceRequirements.
:ivar cpu:
:vartype cpu: float
:ivar cpu_limit:
:vartype cpu_limit: float
:ivar memory_in_gb:
:vartype memory_in_gb: float
:ivar memory_in_gb_limit:
:vartype memory_in_gb_limit: float
:ivar gpu_enabled:
:vartype gpu_enabled: bool
:ivar gpu:
:vartype gpu: int
:ivar fpga:
:vartype fpga: int
"""
_attribute_map = {
'cpu': {'key': 'cpu', 'type': 'float'},
'cpu_limit': {'key': 'cpuLimit', 'type': 'float'},
'memory_in_gb': {'key': 'memoryInGB', 'type': 'float'},
'memory_in_gb_limit': {'key': 'memoryInGBLimit', 'type': 'float'},
'gpu_enabled': {'key': 'gpuEnabled', 'type': 'bool'},
'gpu': {'key': 'gpu', 'type': 'int'},
'fpga': {'key': 'fpga', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword cpu:
:paramtype cpu: float
:keyword cpu_limit:
:paramtype cpu_limit: float
:keyword memory_in_gb:
:paramtype memory_in_gb: float
:keyword memory_in_gb_limit:
:paramtype memory_in_gb_limit: float
:keyword gpu_enabled:
:paramtype gpu_enabled: bool
:keyword gpu:
:paramtype gpu: int
:keyword fpga:
:paramtype fpga: int
"""
super(ContainerResourceRequirements, self).__init__(**kwargs)
self.cpu = kwargs.get('cpu', None)
self.cpu_limit = kwargs.get('cpu_limit', None)
self.memory_in_gb = kwargs.get('memory_in_gb', None)
self.memory_in_gb_limit = kwargs.get('memory_in_gb_limit', None)
self.gpu_enabled = kwargs.get('gpu_enabled', None)
self.gpu = kwargs.get('gpu', None)
self.fpga = kwargs.get('fpga', None)
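
# Sketch (values hypothetical): the paired fields appear to follow a
# request/limit pattern, with 'cpu'/'memory_in_gb' as the requested resources
# and the '*_limit' fields as hard caps:
#
#     reqs = ContainerResourceRequirements(
#         cpu=1.0, cpu_limit=2.0,
#         memory_in_gb=2.0, memory_in_gb_limit=4.0,
#         gpu_enabled=False,
#     )
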
class ControlInput(msrest.serialization.Model):
"""ControlInput.
:ivar name:
:vartype name: str
:ivar default_value: Possible values include: "None", "False", "True", "Skipped".
:vartype default_value: str or ~flow.models.ControlInputValue
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'default_value': {'key': 'defaultValue', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword default_value: Possible values include: "None", "False", "True", "Skipped".
:paramtype default_value: str or ~flow.models.ControlInputValue
"""
super(ControlInput, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.default_value = kwargs.get('default_value', None)
class ControlOutput(msrest.serialization.Model):
"""ControlOutput.
:ivar name:
:vartype name: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
"""
super(ControlOutput, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
class CopyDataTask(msrest.serialization.Model):
"""CopyDataTask.
:ivar data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
:vartype data_copy_mode: str or ~flow.models.DataCopyMode
"""
_attribute_map = {
'data_copy_mode': {'key': 'DataCopyMode', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
:paramtype data_copy_mode: str or ~flow.models.DataCopyMode
"""
super(CopyDataTask, self).__init__(**kwargs)
self.data_copy_mode = kwargs.get('data_copy_mode', None)
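
# Illustrative: enum-typed fields such as data_copy_mode accept the plain
# string (or the corresponding ~flow.models enum member, which serializes to
# its string value). Note that the wire key here is 'DataCopyMode', as declared
# in _attribute_map:
#
#     task = CopyDataTask(data_copy_mode='FailIfConflict')
#     task.serialize()  # {'DataCopyMode': 'FailIfConflict'}
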
class CreatedBy(msrest.serialization.Model):
"""CreatedBy.
:ivar user_object_id:
:vartype user_object_id: str
:ivar user_tenant_id:
:vartype user_tenant_id: str
:ivar user_name:
:vartype user_name: str
"""
_attribute_map = {
'user_object_id': {'key': 'userObjectId', 'type': 'str'},
'user_tenant_id': {'key': 'userTenantId', 'type': 'str'},
'user_name': {'key': 'userName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword user_object_id:
:paramtype user_object_id: str
:keyword user_tenant_id:
:paramtype user_tenant_id: str
:keyword user_name:
:paramtype user_name: str
"""
super(CreatedBy, self).__init__(**kwargs)
self.user_object_id = kwargs.get('user_object_id', None)
self.user_tenant_id = kwargs.get('user_tenant_id', None)
self.user_name = kwargs.get('user_name', None)
class CreatedFromDto(msrest.serialization.Model):
"""CreatedFromDto.
:ivar type: The only acceptable values to pass in are None and "Notebook". The default value
is None.
:vartype type: str
:ivar location_type: The only acceptable values to pass in are None and "ArtifactId". The
default value is None.
:vartype location_type: str
:ivar location:
:vartype location: str
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'location_type': {'key': 'locationType', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: The only acceptable values to pass in are None and "Notebook". The default
value is None.
:paramtype type: str
:keyword location_type: The only acceptable values to pass in are None and "ArtifactId". The
default value is None.
:paramtype location_type: str
:keyword location:
:paramtype location: str
"""
super(CreatedFromDto, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.location_type = kwargs.get('location_type', None)
self.location = kwargs.get('location', None)
class CreateFlowRequest(msrest.serialization.Model):
"""CreateFlowRequest.
:ivar flow_name:
:vartype flow_name: str
:ivar description:
:vartype description: str
:ivar details:
:vartype details: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar flow:
:vartype flow: ~flow.models.Flow
:ivar flow_definition_file_path:
:vartype flow_definition_file_path: str
:ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType
:ivar flow_run_settings:
:vartype flow_run_settings: ~flow.models.FlowRunSettings
:ivar is_archived:
:vartype is_archived: bool
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar identity:
:vartype identity: str
"""
_attribute_map = {
'flow_name': {'key': 'flowName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'details': {'key': 'details', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'flow': {'key': 'flow', 'type': 'Flow'},
'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
'flow_type': {'key': 'flowType', 'type': 'str'},
'flow_run_settings': {'key': 'flowRunSettings', 'type': 'FlowRunSettings'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'identity': {'key': 'identity', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_name:
:paramtype flow_name: str
:keyword description:
:paramtype description: str
:keyword details:
:paramtype details: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword flow:
:paramtype flow: ~flow.models.Flow
:keyword flow_definition_file_path:
:paramtype flow_definition_file_path: str
:keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:paramtype flow_type: str or ~flow.models.FlowType
:keyword flow_run_settings:
:paramtype flow_run_settings: ~flow.models.FlowRunSettings
:keyword is_archived:
:paramtype is_archived: bool
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword identity:
:paramtype identity: str
"""
super(CreateFlowRequest, self).__init__(**kwargs)
self.flow_name = kwargs.get('flow_name', None)
self.description = kwargs.get('description', None)
self.details = kwargs.get('details', None)
self.tags = kwargs.get('tags', None)
self.flow = kwargs.get('flow', None)
self.flow_definition_file_path = kwargs.get('flow_definition_file_path', None)
self.flow_type = kwargs.get('flow_type', None)
self.flow_run_settings = kwargs.get('flow_run_settings', None)
self.is_archived = kwargs.get('is_archived', None)
self.vm_size = kwargs.get('vm_size', None)
self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
self.identity = kwargs.get('identity', None)
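
# Illustrative request body (names, paths, and values below are hypothetical;
# Flow and FlowRunSettings are defined elsewhere in this module):
#
#     request = CreateFlowRequest(
#         flow_name='classification-flow',
#         flow_type='Chat',
#         tags={'team': 'ml'},
#         flow_definition_file_path='flows/chat/flow.dag.yaml',
#         max_idle_time_seconds=3600,
#     )
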
class CreateFlowRuntimeRequest(msrest.serialization.Model):
"""CreateFlowRuntimeRequest.
:ivar runtime_type: Possible values include: "ManagedOnlineEndpoint", "ComputeInstance",
"TrainingSession".
:vartype runtime_type: str or ~flow.models.RuntimeType
:ivar identity:
:vartype identity: ~flow.models.ManagedServiceIdentity
:ivar instance_type:
:vartype instance_type: str
:ivar from_existing_endpoint:
:vartype from_existing_endpoint: bool
:ivar from_existing_deployment:
:vartype from_existing_deployment: bool
:ivar endpoint_name:
:vartype endpoint_name: str
:ivar deployment_name:
:vartype deployment_name: str
:ivar compute_instance_name:
:vartype compute_instance_name: str
:ivar from_existing_custom_app:
:vartype from_existing_custom_app: bool
:ivar custom_app_name:
:vartype custom_app_name: str
:ivar runtime_description:
:vartype runtime_description: str
:ivar environment:
:vartype environment: str
:ivar instance_count:
:vartype instance_count: int
"""
_attribute_map = {
'runtime_type': {'key': 'runtimeType', 'type': 'str'},
'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'},
'instance_type': {'key': 'instanceType', 'type': 'str'},
'from_existing_endpoint': {'key': 'fromExistingEndpoint', 'type': 'bool'},
'from_existing_deployment': {'key': 'fromExistingDeployment', 'type': 'bool'},
'endpoint_name': {'key': 'endpointName', 'type': 'str'},
'deployment_name': {'key': 'deploymentName', 'type': 'str'},
'compute_instance_name': {'key': 'computeInstanceName', 'type': 'str'},
'from_existing_custom_app': {'key': 'fromExistingCustomApp', 'type': 'bool'},
'custom_app_name': {'key': 'customAppName', 'type': 'str'},
'runtime_description': {'key': 'runtimeDescription', 'type': 'str'},
'environment': {'key': 'environment', 'type': 'str'},
'instance_count': {'key': 'instanceCount', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword runtime_type: Possible values include: "ManagedOnlineEndpoint", "ComputeInstance",
"TrainingSession".
:paramtype runtime_type: str or ~flow.models.RuntimeType
:keyword identity:
:paramtype identity: ~flow.models.ManagedServiceIdentity
:keyword instance_type:
:paramtype instance_type: str
:keyword from_existing_endpoint:
:paramtype from_existing_endpoint: bool
:keyword from_existing_deployment:
:paramtype from_existing_deployment: bool
:keyword endpoint_name:
:paramtype endpoint_name: str
:keyword deployment_name:
:paramtype deployment_name: str
:keyword compute_instance_name:
:paramtype compute_instance_name: str
:keyword from_existing_custom_app:
:paramtype from_existing_custom_app: bool
:keyword custom_app_name:
:paramtype custom_app_name: str
:keyword runtime_description:
:paramtype runtime_description: str
:keyword environment:
:paramtype environment: str
:keyword instance_count:
:paramtype instance_count: int
"""
super(CreateFlowRuntimeRequest, self).__init__(**kwargs)
self.runtime_type = kwargs.get('runtime_type', None)
self.identity = kwargs.get('identity', None)
self.instance_type = kwargs.get('instance_type', None)
self.from_existing_endpoint = kwargs.get('from_existing_endpoint', None)
self.from_existing_deployment = kwargs.get('from_existing_deployment', None)
self.endpoint_name = kwargs.get('endpoint_name', None)
self.deployment_name = kwargs.get('deployment_name', None)
self.compute_instance_name = kwargs.get('compute_instance_name', None)
self.from_existing_custom_app = kwargs.get('from_existing_custom_app', None)
self.custom_app_name = kwargs.get('custom_app_name', None)
self.runtime_description = kwargs.get('runtime_description', None)
self.environment = kwargs.get('environment', None)
self.instance_count = kwargs.get('instance_count', None)
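
# Sketch (values hypothetical): the from_existing_* flags indicate whether the
# runtime should reuse an existing endpoint/deployment/custom app rather than
# provisioning a new one:
#
#     runtime = CreateFlowRuntimeRequest(
#         runtime_type='ComputeInstance',
#         compute_instance_name='my-ci',
#         from_existing_custom_app=False,
#         instance_count=1,
#     )
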
class CreateFlowSessionRequest(msrest.serialization.Model):
"""CreateFlowSessionRequest.
:ivar python_pip_requirements:
:vartype python_pip_requirements: list[str]
:ivar base_image:
:vartype base_image: str
:ivar action: Possible values include: "Install", "Reset", "Update", "Delete".
:vartype action: str or ~flow.models.SetupFlowSessionAction
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar identity:
:vartype identity: str
:ivar compute_name:
:vartype compute_name: str
"""
_attribute_map = {
'python_pip_requirements': {'key': 'pythonPipRequirements', 'type': '[str]'},
'base_image': {'key': 'baseImage', 'type': 'str'},
'action': {'key': 'action', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'identity': {'key': 'identity', 'type': 'str'},
'compute_name': {'key': 'computeName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword python_pip_requirements:
:paramtype python_pip_requirements: list[str]
:keyword base_image:
:paramtype base_image: str
:keyword action: Possible values include: "Install", "Reset", "Update", "Delete".
:paramtype action: str or ~flow.models.SetupFlowSessionAction
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword identity:
:paramtype identity: str
:keyword compute_name:
:paramtype compute_name: str
"""
super(CreateFlowSessionRequest, self).__init__(**kwargs)
self.python_pip_requirements = kwargs.get('python_pip_requirements', None)
self.base_image = kwargs.get('base_image', None)
self.action = kwargs.get('action', None)
self.vm_size = kwargs.get('vm_size', None)
self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
self.identity = kwargs.get('identity', None)
self.compute_name = kwargs.get('compute_name', None)
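
# Sketch of a session setup payload (the image tag and package pins are
# hypothetical):
#
#     session = CreateFlowSessionRequest(
#         action='Install',
#         python_pip_requirements=['numpy==1.26.4', 'pandas'],
#         base_image='mcr.microsoft.com/azureml/promptflow/promptflow-runtime:latest',
#         max_idle_time_seconds=1800,
#     )
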
class CreateInferencePipelineRequest(msrest.serialization.Model):
"""CreateInferencePipelineRequest.
:ivar module_node_id:
:vartype module_node_id: str
:ivar port_name:
:vartype port_name: str
:ivar training_pipeline_draft_name:
:vartype training_pipeline_draft_name: str
:ivar training_pipeline_run_display_name:
:vartype training_pipeline_run_display_name: str
:ivar name:
:vartype name: str
:ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline",
"BatchInferencePipeline", "Unknown".
:vartype pipeline_type: str or ~flow.models.PipelineType
:ivar pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:vartype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:ivar graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
"ContainsDesignerBuildin".
:vartype graph_components_mode: str or ~flow.models.GraphComponentsMode
:ivar sub_pipelines_info:
:vartype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:ivar flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
:vartype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
:ivar pipeline_parameters: This is a dictionary.
:vartype pipeline_parameters: dict[str, str]
:ivar data_path_assignments: This is a dictionary.
:vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:ivar data_set_definition_value_assignments: This is a dictionary.
:vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue]
:ivar asset_output_settings_assignments: This is a dictionary.
:vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:ivar graph:
:vartype graph: ~flow.models.GraphDraftEntity
:ivar pipeline_run_settings:
:vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:ivar module_node_run_settings:
:vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:ivar module_node_ui_input_settings:
:vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar continue_run_on_step_failure:
:vartype continue_run_on_step_failure: bool
:ivar description:
:vartype description: str
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar enforce_rerun:
:vartype enforce_rerun: bool
:ivar dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2",
"DatasetInDesignerUI", "AssetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI",
"AssetInDpv2WithDatasetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI",
"DatasetAndAssetInDpv2WithDatasetInDesignerUI", "Dataset", "Asset".
:vartype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
_attribute_map = {
'module_node_id': {'key': 'moduleNodeId', 'type': 'str'},
'port_name': {'key': 'portName', 'type': 'str'},
'training_pipeline_draft_name': {'key': 'trainingPipelineDraftName', 'type': 'str'},
'training_pipeline_run_display_name': {'key': 'trainingPipelineRunDisplayName', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
'pipeline_draft_mode': {'key': 'pipelineDraftMode', 'type': 'str'},
'graph_components_mode': {'key': 'graphComponentsMode', 'type': 'str'},
'sub_pipelines_info': {'key': 'subPipelinesInfo', 'type': 'SubPipelinesInfo'},
'flattened_sub_graphs': {'key': 'flattenedSubGraphs', 'type': '{PipelineSubDraft}'},
'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'},
'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'},
'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
'graph': {'key': 'graph', 'type': 'GraphDraftEntity'},
'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'},
'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'},
'tags': {'key': 'tags', 'type': '{str}'},
'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
'dataset_access_modes': {'key': 'datasetAccessModes', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword module_node_id:
:paramtype module_node_id: str
:keyword port_name:
:paramtype port_name: str
:keyword training_pipeline_draft_name:
:paramtype training_pipeline_draft_name: str
:keyword training_pipeline_run_display_name:
:paramtype training_pipeline_run_display_name: str
:keyword name:
:paramtype name: str
:keyword pipeline_type: Possible values include: "TrainingPipeline",
"RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
:paramtype pipeline_type: str or ~flow.models.PipelineType
:keyword pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:paramtype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:keyword graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
"ContainsDesignerBuildin".
:paramtype graph_components_mode: str or ~flow.models.GraphComponentsMode
:keyword sub_pipelines_info:
:paramtype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:keyword flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
:paramtype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
:keyword pipeline_parameters: This is a dictionary.
:paramtype pipeline_parameters: dict[str, str]
:keyword data_path_assignments: This is a dictionary.
:paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:keyword data_set_definition_value_assignments: This is a dictionary.
:paramtype data_set_definition_value_assignments: dict[str,
~flow.models.DataSetDefinitionValue]
:keyword asset_output_settings_assignments: This is a dictionary.
:paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:keyword graph:
:paramtype graph: ~flow.models.GraphDraftEntity
:keyword pipeline_run_settings:
:paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:keyword module_node_run_settings:
:paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:keyword module_node_ui_input_settings:
:paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword continue_run_on_step_failure:
:paramtype continue_run_on_step_failure: bool
:keyword description:
:paramtype description: str
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword enforce_rerun:
:paramtype enforce_rerun: bool
:keyword dataset_access_modes: Possible values include: "Default", "DatasetInDpv2",
"AssetInDpv2", "DatasetInDesignerUI", "AssetInDesignerUI",
"DatasetInDpv2WithDatasetInDesignerUI", "AssetInDpv2WithDatasetInDesignerUI",
"AssetInDpv2WithAssetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI", "Dataset",
"Asset".
:paramtype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
super(CreateInferencePipelineRequest, self).__init__(**kwargs)
self.module_node_id = kwargs.get('module_node_id', None)
self.port_name = kwargs.get('port_name', None)
self.training_pipeline_draft_name = kwargs.get('training_pipeline_draft_name', None)
self.training_pipeline_run_display_name = kwargs.get('training_pipeline_run_display_name', None)
self.name = kwargs.get('name', None)
self.pipeline_type = kwargs.get('pipeline_type', None)
self.pipeline_draft_mode = kwargs.get('pipeline_draft_mode', None)
self.graph_components_mode = kwargs.get('graph_components_mode', None)
self.sub_pipelines_info = kwargs.get('sub_pipelines_info', None)
self.flattened_sub_graphs = kwargs.get('flattened_sub_graphs', None)
self.pipeline_parameters = kwargs.get('pipeline_parameters', None)
self.data_path_assignments = kwargs.get('data_path_assignments', None)
self.data_set_definition_value_assignments = kwargs.get('data_set_definition_value_assignments', None)
self.asset_output_settings_assignments = kwargs.get('asset_output_settings_assignments', None)
self.graph = kwargs.get('graph', None)
self.pipeline_run_settings = kwargs.get('pipeline_run_settings', None)
self.module_node_run_settings = kwargs.get('module_node_run_settings', None)
self.module_node_ui_input_settings = kwargs.get('module_node_ui_input_settings', None)
self.tags = kwargs.get('tags', None)
self.continue_run_on_step_failure = kwargs.get('continue_run_on_step_failure', None)
self.description = kwargs.get('description', None)
self.properties = kwargs.get('properties', None)
self.enforce_rerun = kwargs.get('enforce_rerun', None)
self.dataset_access_modes = kwargs.get('dataset_access_modes', None)
class CreateOrUpdateConnectionRequest(msrest.serialization.Model):
"""CreateOrUpdateConnectionRequest.
:ivar connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:vartype connection_type: str or ~flow.models.ConnectionType
:ivar connection_scope: Possible values include: "User", "WorkspaceShared".
:vartype connection_scope: str or ~flow.models.ConnectionScope
:ivar configs: This is a dictionary.
:vartype configs: dict[str, str]
:ivar custom_configs: This is a dictionary.
:vartype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
:ivar expiry_time:
:vartype expiry_time: ~datetime.datetime
"""
_attribute_map = {
'connection_type': {'key': 'connectionType', 'type': 'str'},
'connection_scope': {'key': 'connectionScope', 'type': 'str'},
'configs': {'key': 'configs', 'type': '{str}'},
'custom_configs': {'key': 'customConfigs', 'type': '{CustomConnectionConfig}'},
'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:paramtype connection_type: str or ~flow.models.ConnectionType
:keyword connection_scope: Possible values include: "User", "WorkspaceShared".
:paramtype connection_scope: str or ~flow.models.ConnectionScope
:keyword configs: This is a dictionary.
:paramtype configs: dict[str, str]
:keyword custom_configs: This is a dictionary.
:paramtype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
:keyword expiry_time:
:paramtype expiry_time: ~datetime.datetime
"""
super(CreateOrUpdateConnectionRequest, self).__init__(**kwargs)
self.connection_type = kwargs.get('connection_type', None)
self.connection_scope = kwargs.get('connection_scope', None)
self.configs = kwargs.get('configs', None)
self.custom_configs = kwargs.get('custom_configs', None)
self.expiry_time = kwargs.get('expiry_time', None)
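
# Illustrative sketch (not part of the generated client): building a request
# for an Azure OpenAI connection with one secret custom config. The keyword
# names and enum strings ("AzureOpenAI", "WorkspaceShared", "Secret") come from
# the docstrings above; the config keys and values are hypothetical.
#
#     request = CreateOrUpdateConnectionRequest(
#         connection_type="AzureOpenAI",
#         connection_scope="WorkspaceShared",
#         configs={"api_base": "https://example.openai.azure.com/"},  # hypothetical key/value
#         custom_configs={
#             "api_key": CustomConnectionConfig(config_value_type="Secret", value="<secret>"),
#         },
#     )
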
class CreateOrUpdateConnectionRequestDto(msrest.serialization.Model):
"""CreateOrUpdateConnectionRequestDto.
:ivar connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:vartype connection_type: str or ~flow.models.ConnectionType
:ivar configs: This is a dictionary.
:vartype configs: dict[str, str]
:ivar custom_configs: This is a dictionary.
:vartype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
:ivar expiry_time:
:vartype expiry_time: ~datetime.datetime
"""
_attribute_map = {
'connection_type': {'key': 'connectionType', 'type': 'str'},
'configs': {'key': 'configs', 'type': '{str}'},
'custom_configs': {'key': 'customConfigs', 'type': '{CustomConnectionConfig}'},
'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:paramtype connection_type: str or ~flow.models.ConnectionType
:keyword configs: This is a dictionary.
:paramtype configs: dict[str, str]
:keyword custom_configs: This is a dictionary.
:paramtype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
:keyword expiry_time:
:paramtype expiry_time: ~datetime.datetime
"""
super(CreateOrUpdateConnectionRequestDto, self).__init__(**kwargs)
self.connection_type = kwargs.get('connection_type', None)
self.configs = kwargs.get('configs', None)
self.custom_configs = kwargs.get('custom_configs', None)
self.expiry_time = kwargs.get('expiry_time', None)
class CreatePipelineDraftRequest(msrest.serialization.Model):
"""CreatePipelineDraftRequest.
:ivar name:
:vartype name: str
:ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline",
"BatchInferencePipeline", "Unknown".
:vartype pipeline_type: str or ~flow.models.PipelineType
:ivar pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:vartype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:ivar graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
"ContainsDesignerBuildin".
:vartype graph_components_mode: str or ~flow.models.GraphComponentsMode
:ivar sub_pipelines_info:
:vartype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:ivar flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
:vartype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
:ivar pipeline_parameters: This is a dictionary.
:vartype pipeline_parameters: dict[str, str]
:ivar data_path_assignments: This is a dictionary.
:vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:ivar data_set_definition_value_assignments: This is a dictionary.
:vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue]
:ivar asset_output_settings_assignments: This is a dictionary.
:vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:ivar graph:
:vartype graph: ~flow.models.GraphDraftEntity
:ivar pipeline_run_settings:
:vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:ivar module_node_run_settings:
:vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:ivar module_node_ui_input_settings:
:vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar continue_run_on_step_failure:
:vartype continue_run_on_step_failure: bool
:ivar description:
:vartype description: str
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar enforce_rerun:
:vartype enforce_rerun: bool
:ivar dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2",
"DatasetInDesignerUI", "AssetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI",
"AssetInDpv2WithDatasetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI",
"DatasetAndAssetInDpv2WithDatasetInDesignerUI", "Dataset", "Asset".
:vartype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
'pipeline_draft_mode': {'key': 'pipelineDraftMode', 'type': 'str'},
'graph_components_mode': {'key': 'graphComponentsMode', 'type': 'str'},
'sub_pipelines_info': {'key': 'subPipelinesInfo', 'type': 'SubPipelinesInfo'},
'flattened_sub_graphs': {'key': 'flattenedSubGraphs', 'type': '{PipelineSubDraft}'},
'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'},
'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'},
'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
'graph': {'key': 'graph', 'type': 'GraphDraftEntity'},
'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'},
'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'},
'tags': {'key': 'tags', 'type': '{str}'},
'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
'dataset_access_modes': {'key': 'datasetAccessModes', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword pipeline_type: Possible values include: "TrainingPipeline",
"RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
:paramtype pipeline_type: str or ~flow.models.PipelineType
:keyword pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:paramtype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:keyword graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
"ContainsDesignerBuildin".
:paramtype graph_components_mode: str or ~flow.models.GraphComponentsMode
:keyword sub_pipelines_info:
:paramtype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:keyword flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
:paramtype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
:keyword pipeline_parameters: This is a dictionary.
:paramtype pipeline_parameters: dict[str, str]
:keyword data_path_assignments: This is a dictionary.
:paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:keyword data_set_definition_value_assignments: This is a dictionary.
:paramtype data_set_definition_value_assignments: dict[str,
~flow.models.DataSetDefinitionValue]
:keyword asset_output_settings_assignments: This is a dictionary.
:paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:keyword graph:
:paramtype graph: ~flow.models.GraphDraftEntity
:keyword pipeline_run_settings:
:paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:keyword module_node_run_settings:
:paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:keyword module_node_ui_input_settings:
:paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword continue_run_on_step_failure:
:paramtype continue_run_on_step_failure: bool
:keyword description:
:paramtype description: str
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword enforce_rerun:
:paramtype enforce_rerun: bool
:keyword dataset_access_modes: Possible values include: "Default", "DatasetInDpv2",
"AssetInDpv2", "DatasetInDesignerUI", "AssetInDesignerUI",
"DatasetInDpv2WithDatasetInDesignerUI", "AssetInDpv2WithDatasetInDesignerUI",
"AssetInDpv2WithAssetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI", "Dataset",
"Asset".
:paramtype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
super(CreatePipelineDraftRequest, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.pipeline_type = kwargs.get('pipeline_type', None)
self.pipeline_draft_mode = kwargs.get('pipeline_draft_mode', None)
self.graph_components_mode = kwargs.get('graph_components_mode', None)
self.sub_pipelines_info = kwargs.get('sub_pipelines_info', None)
self.flattened_sub_graphs = kwargs.get('flattened_sub_graphs', None)
self.pipeline_parameters = kwargs.get('pipeline_parameters', None)
self.data_path_assignments = kwargs.get('data_path_assignments', None)
self.data_set_definition_value_assignments = kwargs.get('data_set_definition_value_assignments', None)
self.asset_output_settings_assignments = kwargs.get('asset_output_settings_assignments', None)
self.graph = kwargs.get('graph', None)
self.pipeline_run_settings = kwargs.get('pipeline_run_settings', None)
self.module_node_run_settings = kwargs.get('module_node_run_settings', None)
self.module_node_ui_input_settings = kwargs.get('module_node_ui_input_settings', None)
self.tags = kwargs.get('tags', None)
self.continue_run_on_step_failure = kwargs.get('continue_run_on_step_failure', None)
self.description = kwargs.get('description', None)
self.properties = kwargs.get('properties', None)
self.enforce_rerun = kwargs.get('enforce_rerun', None)
self.dataset_access_modes = kwargs.get('dataset_access_modes', None)
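
# Illustrative sketch: a minimal draft-creation payload. Only a few of the
# optional keywords are shown; the names and enum strings ("TrainingPipeline",
# "Normal") are taken from the docstring above, while the literal values are
# hypothetical.
#
#     draft = CreatePipelineDraftRequest(
#         name="my-draft",
#         pipeline_type="TrainingPipeline",
#         pipeline_draft_mode="Normal",
#         pipeline_parameters={"learning_rate": "0.01"},
#         tags={"team": "ml"},
#         description="Example training pipeline draft",
#     )
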
class CreatePipelineJobScheduleDto(msrest.serialization.Model):
"""CreatePipelineJobScheduleDto.
:ivar name:
:vartype name: str
:ivar pipeline_job_name:
:vartype pipeline_job_name: str
:ivar pipeline_job_runtime_settings:
:vartype pipeline_job_runtime_settings: ~flow.models.PipelineJobRuntimeBasicSettings
:ivar display_name:
:vartype display_name: str
:ivar trigger_type: Possible values include: "Recurrence", "Cron".
:vartype trigger_type: str or ~flow.models.TriggerType
:ivar recurrence:
:vartype recurrence: ~flow.models.Recurrence
:ivar cron:
:vartype cron: ~flow.models.Cron
:ivar status: Possible values include: "Enabled", "Disabled".
:vartype status: str or ~flow.models.ScheduleStatus
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'pipeline_job_name': {'key': 'pipelineJobName', 'type': 'str'},
'pipeline_job_runtime_settings': {'key': 'pipelineJobRuntimeSettings', 'type': 'PipelineJobRuntimeBasicSettings'},
'display_name': {'key': 'displayName', 'type': 'str'},
'trigger_type': {'key': 'triggerType', 'type': 'str'},
'recurrence': {'key': 'recurrence', 'type': 'Recurrence'},
'cron': {'key': 'cron', 'type': 'Cron'},
'status': {'key': 'status', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword pipeline_job_name:
:paramtype pipeline_job_name: str
:keyword pipeline_job_runtime_settings:
:paramtype pipeline_job_runtime_settings: ~flow.models.PipelineJobRuntimeBasicSettings
:keyword display_name:
:paramtype display_name: str
:keyword trigger_type: Possible values include: "Recurrence", "Cron".
:paramtype trigger_type: str or ~flow.models.TriggerType
:keyword recurrence:
:paramtype recurrence: ~flow.models.Recurrence
:keyword cron:
:paramtype cron: ~flow.models.Cron
:keyword status: Possible values include: "Enabled", "Disabled".
:paramtype status: str or ~flow.models.ScheduleStatus
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
"""
super(CreatePipelineJobScheduleDto, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.pipeline_job_name = kwargs.get('pipeline_job_name', None)
self.pipeline_job_runtime_settings = kwargs.get('pipeline_job_runtime_settings', None)
self.display_name = kwargs.get('display_name', None)
self.trigger_type = kwargs.get('trigger_type', None)
self.recurrence = kwargs.get('recurrence', None)
self.cron = kwargs.get('cron', None)
self.status = kwargs.get('status', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
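
# Illustrative sketch: a cron-triggered schedule. The Cron model is defined
# later in this module; the schedule name, job name, expression, and time zone
# are hypothetical values.
#
#     schedule = CreatePipelineJobScheduleDto(
#         name="nightly-schedule",
#         pipeline_job_name="train-job",
#         trigger_type="Cron",
#         cron=Cron(expression="0 0 * * *", time_zone="UTC"),
#         status="Enabled",
#     )
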
class CreatePublishedPipelineRequest(msrest.serialization.Model):
"""CreatePublishedPipelineRequest.
:ivar use_pipeline_endpoint:
:vartype use_pipeline_endpoint: bool
:ivar pipeline_name:
:vartype pipeline_name: str
:ivar pipeline_description:
:vartype pipeline_description: str
:ivar use_existing_pipeline_endpoint:
:vartype use_existing_pipeline_endpoint: bool
:ivar pipeline_endpoint_name:
:vartype pipeline_endpoint_name: str
:ivar pipeline_endpoint_description:
:vartype pipeline_endpoint_description: str
:ivar set_as_default_pipeline_for_endpoint:
:vartype set_as_default_pipeline_for_endpoint: bool
:ivar step_tags: This is a dictionary.
:vartype step_tags: dict[str, str]
:ivar experiment_name:
:vartype experiment_name: str
:ivar pipeline_parameters: This is a dictionary.
:vartype pipeline_parameters: dict[str, str]
:ivar data_path_assignments: This is a dictionary.
:vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:ivar data_set_definition_value_assignments: This is a dictionary.
:vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue]
:ivar asset_output_settings_assignments: This is a dictionary.
:vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:ivar enable_notification:
:vartype enable_notification: bool
:ivar sub_pipelines_info:
:vartype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:ivar display_name:
:vartype display_name: str
:ivar run_id:
:vartype run_id: str
:ivar parent_run_id:
:vartype parent_run_id: str
:ivar graph:
:vartype graph: ~flow.models.GraphDraftEntity
:ivar pipeline_run_settings:
:vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:ivar module_node_run_settings:
:vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:ivar module_node_ui_input_settings:
:vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar continue_run_on_step_failure:
:vartype continue_run_on_step_failure: bool
:ivar description:
:vartype description: str
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar enforce_rerun:
:vartype enforce_rerun: bool
:ivar dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2",
"DatasetInDesignerUI", "AssetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI",
"AssetInDpv2WithDatasetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI",
"DatasetAndAssetInDpv2WithDatasetInDesignerUI", "Dataset", "Asset".
:vartype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
_attribute_map = {
'use_pipeline_endpoint': {'key': 'usePipelineEndpoint', 'type': 'bool'},
'pipeline_name': {'key': 'pipelineName', 'type': 'str'},
'pipeline_description': {'key': 'pipelineDescription', 'type': 'str'},
'use_existing_pipeline_endpoint': {'key': 'useExistingPipelineEndpoint', 'type': 'bool'},
'pipeline_endpoint_name': {'key': 'pipelineEndpointName', 'type': 'str'},
'pipeline_endpoint_description': {'key': 'pipelineEndpointDescription', 'type': 'str'},
'set_as_default_pipeline_for_endpoint': {'key': 'setAsDefaultPipelineForEndpoint', 'type': 'bool'},
'step_tags': {'key': 'stepTags', 'type': '{str}'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'},
'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'},
'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
'enable_notification': {'key': 'enableNotification', 'type': 'bool'},
'sub_pipelines_info': {'key': 'subPipelinesInfo', 'type': 'SubPipelinesInfo'},
'display_name': {'key': 'displayName', 'type': 'str'},
'run_id': {'key': 'runId', 'type': 'str'},
'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
'graph': {'key': 'graph', 'type': 'GraphDraftEntity'},
'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'},
'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'},
'tags': {'key': 'tags', 'type': '{str}'},
'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
'dataset_access_modes': {'key': 'datasetAccessModes', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword use_pipeline_endpoint:
:paramtype use_pipeline_endpoint: bool
:keyword pipeline_name:
:paramtype pipeline_name: str
:keyword pipeline_description:
:paramtype pipeline_description: str
:keyword use_existing_pipeline_endpoint:
:paramtype use_existing_pipeline_endpoint: bool
:keyword pipeline_endpoint_name:
:paramtype pipeline_endpoint_name: str
:keyword pipeline_endpoint_description:
:paramtype pipeline_endpoint_description: str
:keyword set_as_default_pipeline_for_endpoint:
:paramtype set_as_default_pipeline_for_endpoint: bool
:keyword step_tags: This is a dictionary.
:paramtype step_tags: dict[str, str]
:keyword experiment_name:
:paramtype experiment_name: str
:keyword pipeline_parameters: This is a dictionary.
:paramtype pipeline_parameters: dict[str, str]
:keyword data_path_assignments: This is a dictionary.
:paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:keyword data_set_definition_value_assignments: This is a dictionary.
:paramtype data_set_definition_value_assignments: dict[str,
~flow.models.DataSetDefinitionValue]
:keyword asset_output_settings_assignments: This is a dictionary.
:paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:keyword enable_notification:
:paramtype enable_notification: bool
:keyword sub_pipelines_info:
:paramtype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:keyword display_name:
:paramtype display_name: str
:keyword run_id:
:paramtype run_id: str
:keyword parent_run_id:
:paramtype parent_run_id: str
:keyword graph:
:paramtype graph: ~flow.models.GraphDraftEntity
:keyword pipeline_run_settings:
:paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:keyword module_node_run_settings:
:paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:keyword module_node_ui_input_settings:
:paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword continue_run_on_step_failure:
:paramtype continue_run_on_step_failure: bool
:keyword description:
:paramtype description: str
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword enforce_rerun:
:paramtype enforce_rerun: bool
:keyword dataset_access_modes: Possible values include: "Default", "DatasetInDpv2",
"AssetInDpv2", "DatasetInDesignerUI", "AssetInDesignerUI",
"DatasetInDpv2WithDatasetInDesignerUI", "AssetInDpv2WithDatasetInDesignerUI",
"AssetInDpv2WithAssetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI", "Dataset",
"Asset".
:paramtype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
super(CreatePublishedPipelineRequest, self).__init__(**kwargs)
self.use_pipeline_endpoint = kwargs.get('use_pipeline_endpoint', None)
self.pipeline_name = kwargs.get('pipeline_name', None)
self.pipeline_description = kwargs.get('pipeline_description', None)
self.use_existing_pipeline_endpoint = kwargs.get('use_existing_pipeline_endpoint', None)
self.pipeline_endpoint_name = kwargs.get('pipeline_endpoint_name', None)
self.pipeline_endpoint_description = kwargs.get('pipeline_endpoint_description', None)
self.set_as_default_pipeline_for_endpoint = kwargs.get('set_as_default_pipeline_for_endpoint', None)
self.step_tags = kwargs.get('step_tags', None)
self.experiment_name = kwargs.get('experiment_name', None)
self.pipeline_parameters = kwargs.get('pipeline_parameters', None)
self.data_path_assignments = kwargs.get('data_path_assignments', None)
self.data_set_definition_value_assignments = kwargs.get('data_set_definition_value_assignments', None)
self.asset_output_settings_assignments = kwargs.get('asset_output_settings_assignments', None)
self.enable_notification = kwargs.get('enable_notification', None)
self.sub_pipelines_info = kwargs.get('sub_pipelines_info', None)
self.display_name = kwargs.get('display_name', None)
self.run_id = kwargs.get('run_id', None)
self.parent_run_id = kwargs.get('parent_run_id', None)
self.graph = kwargs.get('graph', None)
self.pipeline_run_settings = kwargs.get('pipeline_run_settings', None)
self.module_node_run_settings = kwargs.get('module_node_run_settings', None)
self.module_node_ui_input_settings = kwargs.get('module_node_ui_input_settings', None)
self.tags = kwargs.get('tags', None)
self.continue_run_on_step_failure = kwargs.get('continue_run_on_step_failure', None)
self.description = kwargs.get('description', None)
self.properties = kwargs.get('properties', None)
self.enforce_rerun = kwargs.get('enforce_rerun', None)
self.dataset_access_modes = kwargs.get('dataset_access_modes', None)
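
# Illustrative sketch: publishing a pipeline behind a pipeline endpoint and
# making it the endpoint's default. Keyword names come from the docstring
# above; the pipeline, endpoint, and experiment names are hypothetical.
#
#     publish_request = CreatePublishedPipelineRequest(
#         use_pipeline_endpoint=True,
#         pipeline_name="train-pipeline",
#         pipeline_endpoint_name="train-endpoint",
#         set_as_default_pipeline_for_endpoint=True,
#         experiment_name="training",
#         tags={"stage": "prod"},
#     )
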
class CreateRealTimeEndpointRequest(msrest.serialization.Model):
"""CreateRealTimeEndpointRequest.
:ivar name:
:vartype name: str
:ivar compute_info:
:vartype compute_info: ~flow.models.ComputeInfo
:ivar description:
:vartype description: str
:ivar linked_pipeline_draft_id:
:vartype linked_pipeline_draft_id: str
:ivar linked_pipeline_run_id:
:vartype linked_pipeline_run_id: str
:ivar aks_advance_settings:
:vartype aks_advance_settings: ~flow.models.AKSAdvanceSettings
:ivar aci_advance_settings:
:vartype aci_advance_settings: ~flow.models.ACIAdvanceSettings
:ivar linked_training_pipeline_run_id:
:vartype linked_training_pipeline_run_id: str
:ivar linked_experiment_name:
:vartype linked_experiment_name: str
:ivar graph_nodes_run_id_mapping: This is a dictionary.
:vartype graph_nodes_run_id_mapping: dict[str, str]
:ivar workflow:
:vartype workflow: ~flow.models.PipelineGraph
:ivar inputs:
:vartype inputs: list[~flow.models.InputOutputPortMetadata]
:ivar outputs:
:vartype outputs: list[~flow.models.InputOutputPortMetadata]
:ivar example_request:
:vartype example_request: ~flow.models.ExampleRequest
:ivar user_storage_connection_string:
:vartype user_storage_connection_string: str
:ivar user_storage_endpoint_uri:
:vartype user_storage_endpoint_uri: str
:ivar user_storage_workspace_sai_token:
:vartype user_storage_workspace_sai_token: str
:ivar user_storage_container_name:
:vartype user_storage_container_name: str
:ivar pipeline_run_id:
:vartype pipeline_run_id: str
:ivar root_pipeline_run_id:
:vartype root_pipeline_run_id: str
:ivar experiment_name:
:vartype experiment_name: str
:ivar experiment_id:
:vartype experiment_id: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'compute_info': {'key': 'computeInfo', 'type': 'ComputeInfo'},
'description': {'key': 'description', 'type': 'str'},
'linked_pipeline_draft_id': {'key': 'linkedPipelineDraftId', 'type': 'str'},
'linked_pipeline_run_id': {'key': 'linkedPipelineRunId', 'type': 'str'},
'aks_advance_settings': {'key': 'aksAdvanceSettings', 'type': 'AKSAdvanceSettings'},
'aci_advance_settings': {'key': 'aciAdvanceSettings', 'type': 'ACIAdvanceSettings'},
'linked_training_pipeline_run_id': {'key': 'linkedTrainingPipelineRunId', 'type': 'str'},
'linked_experiment_name': {'key': 'linkedExperimentName', 'type': 'str'},
'graph_nodes_run_id_mapping': {'key': 'graphNodesRunIdMapping', 'type': '{str}'},
'workflow': {'key': 'workflow', 'type': 'PipelineGraph'},
'inputs': {'key': 'inputs', 'type': '[InputOutputPortMetadata]'},
'outputs': {'key': 'outputs', 'type': '[InputOutputPortMetadata]'},
'example_request': {'key': 'exampleRequest', 'type': 'ExampleRequest'},
'user_storage_connection_string': {'key': 'userStorageConnectionString', 'type': 'str'},
'user_storage_endpoint_uri': {'key': 'userStorageEndpointUri', 'type': 'str'},
'user_storage_workspace_sai_token': {'key': 'userStorageWorkspaceSaiToken', 'type': 'str'},
'user_storage_container_name': {'key': 'userStorageContainerName', 'type': 'str'},
'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'},
'root_pipeline_run_id': {'key': 'rootPipelineRunId', 'type': 'str'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword compute_info:
:paramtype compute_info: ~flow.models.ComputeInfo
:keyword description:
:paramtype description: str
:keyword linked_pipeline_draft_id:
:paramtype linked_pipeline_draft_id: str
:keyword linked_pipeline_run_id:
:paramtype linked_pipeline_run_id: str
:keyword aks_advance_settings:
:paramtype aks_advance_settings: ~flow.models.AKSAdvanceSettings
:keyword aci_advance_settings:
:paramtype aci_advance_settings: ~flow.models.ACIAdvanceSettings
:keyword linked_training_pipeline_run_id:
:paramtype linked_training_pipeline_run_id: str
:keyword linked_experiment_name:
:paramtype linked_experiment_name: str
:keyword graph_nodes_run_id_mapping: This is a dictionary.
:paramtype graph_nodes_run_id_mapping: dict[str, str]
:keyword workflow:
:paramtype workflow: ~flow.models.PipelineGraph
:keyword inputs:
:paramtype inputs: list[~flow.models.InputOutputPortMetadata]
:keyword outputs:
:paramtype outputs: list[~flow.models.InputOutputPortMetadata]
:keyword example_request:
:paramtype example_request: ~flow.models.ExampleRequest
:keyword user_storage_connection_string:
:paramtype user_storage_connection_string: str
:keyword user_storage_endpoint_uri:
:paramtype user_storage_endpoint_uri: str
:keyword user_storage_workspace_sai_token:
:paramtype user_storage_workspace_sai_token: str
:keyword user_storage_container_name:
:paramtype user_storage_container_name: str
:keyword pipeline_run_id:
:paramtype pipeline_run_id: str
:keyword root_pipeline_run_id:
:paramtype root_pipeline_run_id: str
:keyword experiment_name:
:paramtype experiment_name: str
:keyword experiment_id:
:paramtype experiment_id: str
"""
super(CreateRealTimeEndpointRequest, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.compute_info = kwargs.get('compute_info', None)
self.description = kwargs.get('description', None)
self.linked_pipeline_draft_id = kwargs.get('linked_pipeline_draft_id', None)
self.linked_pipeline_run_id = kwargs.get('linked_pipeline_run_id', None)
self.aks_advance_settings = kwargs.get('aks_advance_settings', None)
self.aci_advance_settings = kwargs.get('aci_advance_settings', None)
self.linked_training_pipeline_run_id = kwargs.get('linked_training_pipeline_run_id', None)
self.linked_experiment_name = kwargs.get('linked_experiment_name', None)
self.graph_nodes_run_id_mapping = kwargs.get('graph_nodes_run_id_mapping', None)
self.workflow = kwargs.get('workflow', None)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
self.example_request = kwargs.get('example_request', None)
self.user_storage_connection_string = kwargs.get('user_storage_connection_string', None)
self.user_storage_endpoint_uri = kwargs.get('user_storage_endpoint_uri', None)
self.user_storage_workspace_sai_token = kwargs.get('user_storage_workspace_sai_token', None)
self.user_storage_container_name = kwargs.get('user_storage_container_name', None)
self.pipeline_run_id = kwargs.get('pipeline_run_id', None)
self.root_pipeline_run_id = kwargs.get('root_pipeline_run_id', None)
self.experiment_name = kwargs.get('experiment_name', None)
self.experiment_id = kwargs.get('experiment_id', None)
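
# Illustrative sketch: a real-time endpoint request backed by ACI. The
# ACIAdvanceSettings model is defined earlier in this module; the endpoint
# name and run id are hypothetical placeholders.
#
#     endpoint_request = CreateRealTimeEndpointRequest(
#         name="scoring-endpoint",
#         description="Example real-time endpoint",
#         linked_pipeline_run_id="<pipeline-run-id>",
#         aci_advance_settings=ACIAdvanceSettings(app_insights_enabled=True),
#     )
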
class CreationContext(msrest.serialization.Model):
"""CreationContext.
:ivar created_time:
:vartype created_time: ~datetime.datetime
:ivar created_by:
:vartype created_by: ~flow.models.SchemaContractsCreatedBy
:ivar creation_source:
:vartype creation_source: str
"""
_attribute_map = {
'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
'created_by': {'key': 'createdBy', 'type': 'SchemaContractsCreatedBy'},
'creation_source': {'key': 'creationSource', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword created_time:
:paramtype created_time: ~datetime.datetime
:keyword created_by:
:paramtype created_by: ~flow.models.SchemaContractsCreatedBy
:keyword creation_source:
:paramtype creation_source: str
"""
super(CreationContext, self).__init__(**kwargs)
self.created_time = kwargs.get('created_time', None)
self.created_by = kwargs.get('created_by', None)
self.creation_source = kwargs.get('creation_source', None)
class Cron(msrest.serialization.Model):
"""Cron.
:ivar expression:
:vartype expression: str
:ivar end_time:
:vartype end_time: str
:ivar start_time:
:vartype start_time: str
:ivar time_zone:
:vartype time_zone: str
"""
_attribute_map = {
'expression': {'key': 'expression', 'type': 'str'},
'end_time': {'key': 'endTime', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'str'},
'time_zone': {'key': 'timeZone', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword expression:
:paramtype expression: str
:keyword end_time:
:paramtype end_time: str
:keyword start_time:
:paramtype start_time: str
:keyword time_zone:
:paramtype time_zone: str
"""
super(Cron, self).__init__(**kwargs)
self.expression = kwargs.get('expression', None)
self.end_time = kwargs.get('end_time', None)
self.start_time = kwargs.get('start_time', None)
self.time_zone = kwargs.get('time_zone', None)
class CustomConnectionConfig(msrest.serialization.Model):
"""CustomConnectionConfig.
:ivar config_value_type: Possible values include: "String", "Secret".
:vartype config_value_type: str or ~flow.models.ConfigValueType
:ivar value:
:vartype value: str
"""
_attribute_map = {
'config_value_type': {'key': 'configValueType', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword config_value_type: Possible values include: "String", "Secret".
:paramtype config_value_type: str or ~flow.models.ConfigValueType
:keyword value:
:paramtype value: str
"""
super(CustomConnectionConfig, self).__init__(**kwargs)
self.config_value_type = kwargs.get('config_value_type', None)
self.value = kwargs.get('value', None)
class CustomReference(msrest.serialization.Model):
"""CustomReference.
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
:ivar relative_path:
:vartype relative_path: str
"""
_attribute_map = {
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
:keyword relative_path:
:paramtype relative_path: str
"""
super(CustomReference, self).__init__(**kwargs)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
self.relative_path = kwargs.get('relative_path', None)
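
# Illustrative sketch: a datastore-relative custom reference. The datastore
# name and path are hypothetical.
#
#     ref = CustomReference(
#         aml_data_store_name="workspaceblobstore",
#         relative_path="data/train.csv",
#     )
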
class Data(msrest.serialization.Model):
"""Data.
:ivar data_location:
:vartype data_location: ~flow.models.ExecutionDataLocation
:ivar mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs".
:vartype mechanism: str or ~flow.models.DeliveryMechanism
:ivar environment_variable_name:
:vartype environment_variable_name: str
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar overwrite:
:vartype overwrite: bool
:ivar options: Dictionary of :code:`<string>`.
:vartype options: dict[str, str]
"""
_attribute_map = {
'data_location': {'key': 'dataLocation', 'type': 'ExecutionDataLocation'},
'mechanism': {'key': 'mechanism', 'type': 'str'},
'environment_variable_name': {'key': 'environmentVariableName', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'overwrite': {'key': 'overwrite', 'type': 'bool'},
'options': {'key': 'options', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_location:
:paramtype data_location: ~flow.models.ExecutionDataLocation
:keyword mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs".
:paramtype mechanism: str or ~flow.models.DeliveryMechanism
:keyword environment_variable_name:
:paramtype environment_variable_name: str
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword overwrite:
:paramtype overwrite: bool
:keyword options: Dictionary of :code:`<string>`.
:paramtype options: dict[str, str]
"""
super(Data, self).__init__(**kwargs)
self.data_location = kwargs.get('data_location', None)
self.mechanism = kwargs.get('mechanism', None)
self.environment_variable_name = kwargs.get('environment_variable_name', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
self.overwrite = kwargs.get('overwrite', None)
self.options = kwargs.get('options', None)
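
# Illustrative sketch: delivering data to a compute target by mounting it.
# "Mount" is one of the documented mechanism values; the environment variable
# name and mount path are hypothetical.
#
#     data = Data(
#         mechanism="Mount",
#         environment_variable_name="TRAIN_DATA",
#         path_on_compute="/mnt/data",
#         overwrite=False,
#     )
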
class DatabaseSink(msrest.serialization.Model):
"""DatabaseSink.
:ivar connection:
:vartype connection: str
:ivar table:
:vartype table: str
"""
_attribute_map = {
'connection': {'key': 'connection', 'type': 'str'},
'table': {'key': 'table', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword connection:
:paramtype connection: str
:keyword table:
:paramtype table: str
"""
super(DatabaseSink, self).__init__(**kwargs)
self.connection = kwargs.get('connection', None)
self.table = kwargs.get('table', None)
class DatabaseSource(msrest.serialization.Model):
"""DatabaseSource.
:ivar connection:
:vartype connection: str
:ivar query:
:vartype query: str
:ivar stored_procedure_name:
:vartype stored_procedure_name: str
:ivar stored_procedure_parameters:
:vartype stored_procedure_parameters: list[~flow.models.StoredProcedureParameter]
"""
_attribute_map = {
'connection': {'key': 'connection', 'type': 'str'},
'query': {'key': 'query', 'type': 'str'},
'stored_procedure_name': {'key': 'storedProcedureName', 'type': 'str'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '[StoredProcedureParameter]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword connection:
:paramtype connection: str
:keyword query:
:paramtype query: str
:keyword stored_procedure_name:
:paramtype stored_procedure_name: str
:keyword stored_procedure_parameters:
:paramtype stored_procedure_parameters: list[~flow.models.StoredProcedureParameter]
"""
super(DatabaseSource, self).__init__(**kwargs)
self.connection = kwargs.get('connection', None)
self.query = kwargs.get('query', None)
self.stored_procedure_name = kwargs.get('stored_procedure_name', None)
self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None)
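
# Illustrative sketch: a database-to-database transfer described with the two
# models above. The connection name, query, and table are hypothetical.
#
#     source = DatabaseSource(connection="sql-connection", query="SELECT * FROM telemetry")
#     sink = DatabaseSink(connection="sql-connection", table="telemetry_copy")
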
class DatabricksComputeInfo(msrest.serialization.Model):
"""DatabricksComputeInfo.
:ivar existing_cluster_id:
:vartype existing_cluster_id: str
"""
_attribute_map = {
'existing_cluster_id': {'key': 'existingClusterId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword existing_cluster_id:
:paramtype existing_cluster_id: str
"""
super(DatabricksComputeInfo, self).__init__(**kwargs)
self.existing_cluster_id = kwargs.get('existing_cluster_id', None)
class DatabricksConfiguration(msrest.serialization.Model):
"""DatabricksConfiguration.
:ivar workers:
:vartype workers: int
:ivar minimum_worker_count: Minimum worker count when autoscale is enabled.
:vartype minimum_worker_count: int
:ivar max_mum_worker_count: Maximum worker count when autoscale is enabled; the name
mirrors the service's "maxMumWorkerCount" field.
:vartype max_mum_worker_count: int
:ivar spark_version:
:vartype spark_version: str
:ivar node_type_id:
:vartype node_type_id: str
:ivar spark_conf: Dictionary of :code:`<string>`.
:vartype spark_conf: dict[str, str]
:ivar spark_env_vars: Dictionary of :code:`<string>`.
:vartype spark_env_vars: dict[str, str]
:ivar cluster_log_conf_dbfs_path:
:vartype cluster_log_conf_dbfs_path: str
:ivar dbfs_init_scripts:
:vartype dbfs_init_scripts: list[~flow.models.InitScriptInfoDto]
:ivar instance_pool_id:
:vartype instance_pool_id: str
:ivar timeout_seconds:
:vartype timeout_seconds: int
:ivar notebook_task:
:vartype notebook_task: ~flow.models.NoteBookTaskDto
:ivar spark_python_task:
:vartype spark_python_task: ~flow.models.SparkPythonTaskDto
:ivar spark_jar_task:
:vartype spark_jar_task: ~flow.models.SparkJarTaskDto
:ivar spark_submit_task:
:vartype spark_submit_task: ~flow.models.SparkSubmitTaskDto
:ivar jar_libraries:
:vartype jar_libraries: list[str]
:ivar egg_libraries:
:vartype egg_libraries: list[str]
:ivar whl_libraries:
:vartype whl_libraries: list[str]
:ivar pypi_libraries:
:vartype pypi_libraries: list[~flow.models.PythonPyPiOrRCranLibraryDto]
:ivar r_cran_libraries:
:vartype r_cran_libraries: list[~flow.models.PythonPyPiOrRCranLibraryDto]
:ivar maven_libraries:
:vartype maven_libraries: list[~flow.models.MavenLibraryDto]
:ivar libraries:
:vartype libraries: list[any]
:ivar linked_adb_workspace_metadata:
:vartype linked_adb_workspace_metadata: ~flow.models.LinkedADBWorkspaceMetadata
:ivar databrick_resource_id:
:vartype databrick_resource_id: str
:ivar auto_scale:
:vartype auto_scale: bool
"""
_attribute_map = {
'workers': {'key': 'workers', 'type': 'int'},
'minimum_worker_count': {'key': 'minimumWorkerCount', 'type': 'int'},
'max_mum_worker_count': {'key': 'maxMumWorkerCount', 'type': 'int'},  # spelling follows the service contract
'spark_version': {'key': 'sparkVersion', 'type': 'str'},
'node_type_id': {'key': 'nodeTypeId', 'type': 'str'},
'spark_conf': {'key': 'sparkConf', 'type': '{str}'},
'spark_env_vars': {'key': 'sparkEnvVars', 'type': '{str}'},
'cluster_log_conf_dbfs_path': {'key': 'clusterLogConfDbfsPath', 'type': 'str'},
'dbfs_init_scripts': {'key': 'dbfsInitScripts', 'type': '[InitScriptInfoDto]'},
'instance_pool_id': {'key': 'instancePoolId', 'type': 'str'},
'timeout_seconds': {'key': 'timeoutSeconds', 'type': 'int'},
'notebook_task': {'key': 'notebookTask', 'type': 'NoteBookTaskDto'},
'spark_python_task': {'key': 'sparkPythonTask', 'type': 'SparkPythonTaskDto'},
'spark_jar_task': {'key': 'sparkJarTask', 'type': 'SparkJarTaskDto'},
'spark_submit_task': {'key': 'sparkSubmitTask', 'type': 'SparkSubmitTaskDto'},
'jar_libraries': {'key': 'jarLibraries', 'type': '[str]'},
'egg_libraries': {'key': 'eggLibraries', 'type': '[str]'},
'whl_libraries': {'key': 'whlLibraries', 'type': '[str]'},
'pypi_libraries': {'key': 'pypiLibraries', 'type': '[PythonPyPiOrRCranLibraryDto]'},
'r_cran_libraries': {'key': 'rCranLibraries', 'type': '[PythonPyPiOrRCranLibraryDto]'},
'maven_libraries': {'key': 'mavenLibraries', 'type': '[MavenLibraryDto]'},
'libraries': {'key': 'libraries', 'type': '[object]'},
'linked_adb_workspace_metadata': {'key': 'linkedADBWorkspaceMetadata', 'type': 'LinkedADBWorkspaceMetadata'},
'databrick_resource_id': {'key': 'databrickResourceId', 'type': 'str'},
'auto_scale': {'key': 'autoScale', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword workers:
:paramtype workers: int
:keyword minimum_worker_count: Minimum worker count when autoscale is enabled.
:paramtype minimum_worker_count: int
:keyword max_mum_worker_count: Maximum worker count when autoscale is enabled; the name
mirrors the service's "maxMumWorkerCount" field.
:paramtype max_mum_worker_count: int
:keyword spark_version:
:paramtype spark_version: str
:keyword node_type_id:
:paramtype node_type_id: str
:keyword spark_conf: Dictionary of :code:`<string>`.
:paramtype spark_conf: dict[str, str]
:keyword spark_env_vars: Dictionary of :code:`<string>`.
:paramtype spark_env_vars: dict[str, str]
:keyword cluster_log_conf_dbfs_path:
:paramtype cluster_log_conf_dbfs_path: str
:keyword dbfs_init_scripts:
:paramtype dbfs_init_scripts: list[~flow.models.InitScriptInfoDto]
:keyword instance_pool_id:
:paramtype instance_pool_id: str
:keyword timeout_seconds:
:paramtype timeout_seconds: int
:keyword notebook_task:
:paramtype notebook_task: ~flow.models.NoteBookTaskDto
:keyword spark_python_task:
:paramtype spark_python_task: ~flow.models.SparkPythonTaskDto
:keyword spark_jar_task:
:paramtype spark_jar_task: ~flow.models.SparkJarTaskDto
:keyword spark_submit_task:
:paramtype spark_submit_task: ~flow.models.SparkSubmitTaskDto
:keyword jar_libraries:
:paramtype jar_libraries: list[str]
:keyword egg_libraries:
:paramtype egg_libraries: list[str]
:keyword whl_libraries:
:paramtype whl_libraries: list[str]
:keyword pypi_libraries:
:paramtype pypi_libraries: list[~flow.models.PythonPyPiOrRCranLibraryDto]
:keyword r_cran_libraries:
:paramtype r_cran_libraries: list[~flow.models.PythonPyPiOrRCranLibraryDto]
:keyword maven_libraries:
:paramtype maven_libraries: list[~flow.models.MavenLibraryDto]
:keyword libraries:
:paramtype libraries: list[any]
:keyword linked_adb_workspace_metadata:
:paramtype linked_adb_workspace_metadata: ~flow.models.LinkedADBWorkspaceMetadata
:keyword databrick_resource_id:
:paramtype databrick_resource_id: str
:keyword auto_scale:
:paramtype auto_scale: bool
"""
super(DatabricksConfiguration, self).__init__(**kwargs)
self.workers = kwargs.get('workers', None)
self.minimum_worker_count = kwargs.get('minimum_worker_count', None)
self.max_mum_worker_count = kwargs.get('max_mum_worker_count', None)
self.spark_version = kwargs.get('spark_version', None)
self.node_type_id = kwargs.get('node_type_id', None)
self.spark_conf = kwargs.get('spark_conf', None)
self.spark_env_vars = kwargs.get('spark_env_vars', None)
self.cluster_log_conf_dbfs_path = kwargs.get('cluster_log_conf_dbfs_path', None)
self.dbfs_init_scripts = kwargs.get('dbfs_init_scripts', None)
self.instance_pool_id = kwargs.get('instance_pool_id', None)
self.timeout_seconds = kwargs.get('timeout_seconds', None)
self.notebook_task = kwargs.get('notebook_task', None)
self.spark_python_task = kwargs.get('spark_python_task', None)
self.spark_jar_task = kwargs.get('spark_jar_task', None)
self.spark_submit_task = kwargs.get('spark_submit_task', None)
self.jar_libraries = kwargs.get('jar_libraries', None)
self.egg_libraries = kwargs.get('egg_libraries', None)
self.whl_libraries = kwargs.get('whl_libraries', None)
self.pypi_libraries = kwargs.get('pypi_libraries', None)
self.r_cran_libraries = kwargs.get('r_cran_libraries', None)
self.maven_libraries = kwargs.get('maven_libraries', None)
self.libraries = kwargs.get('libraries', None)
self.linked_adb_workspace_metadata = kwargs.get('linked_adb_workspace_metadata', None)
self.databrick_resource_id = kwargs.get('databrick_resource_id', None)
self.auto_scale = kwargs.get('auto_scale', None)
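
# Illustrative sketch: an autoscaling Databricks cluster configuration. All
# values are hypothetical; with auto_scale set, the minimum/maximum worker
# counts are assumed to bound the cluster instead of the fixed "workers" count.
#
#     config = DatabricksConfiguration(
#         auto_scale=True,
#         minimum_worker_count=2,
#         max_mum_worker_count=8,   # name mirrors the service's "maxMumWorkerCount" field
#         spark_version="11.3.x-scala2.12",
#         node_type_id="Standard_DS3_v2",
#         spark_conf={"spark.speculation": "true"},
#     )
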
class DatacacheConfiguration(msrest.serialization.Model):
"""DatacacheConfiguration.
:ivar datacache_id:
:vartype datacache_id: str
:ivar datacache_store:
:vartype datacache_store: str
:ivar dataset_id:
:vartype dataset_id: str
:ivar mode: The only acceptable values to pass in are None and "Mount". The default value is
None.
:vartype mode: str
:ivar replica:
:vartype replica: int
:ivar failure_fallback:
:vartype failure_fallback: bool
:ivar path_on_compute:
:vartype path_on_compute: str
"""
_attribute_map = {
'datacache_id': {'key': 'datacacheId', 'type': 'str'},
'datacache_store': {'key': 'datacacheStore', 'type': 'str'},
'dataset_id': {'key': 'datasetId', 'type': 'str'},
'mode': {'key': 'mode', 'type': 'str'},
'replica': {'key': 'replica', 'type': 'int'},
'failure_fallback': {'key': 'failureFallback', 'type': 'bool'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword datacache_id:
:paramtype datacache_id: str
:keyword datacache_store:
:paramtype datacache_store: str
:keyword dataset_id:
:paramtype dataset_id: str
:keyword mode: The only acceptable values to pass in are None and "Mount". The default value
is None.
:paramtype mode: str
:keyword replica:
:paramtype replica: int
:keyword failure_fallback:
:paramtype failure_fallback: bool
:keyword path_on_compute:
:paramtype path_on_compute: str
"""
super(DatacacheConfiguration, self).__init__(**kwargs)
self.datacache_id = kwargs.get('datacache_id', None)
self.datacache_store = kwargs.get('datacache_store', None)
self.dataset_id = kwargs.get('dataset_id', None)
self.mode = kwargs.get('mode', None)
self.replica = kwargs.get('replica', None)
self.failure_fallback = kwargs.get('failure_fallback', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
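
# Illustrative sketch: mounting a datacache with fallback to the underlying
# dataset on failure. The store name and dataset id are hypothetical; per the
# docstring above, mode accepts only None or "Mount".
#
#     cache = DatacacheConfiguration(
#         datacache_store="adls-cache",
#         dataset_id="<dataset-id>",
#         mode="Mount",
#         replica=2,
#         failure_fallback=True,
#     )
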
class DataInfo(msrest.serialization.Model):
"""DataInfo.
:ivar feed_name:
:vartype feed_name: str
:ivar id:
:vartype id: str
:ivar data_source_type: Possible values include: "None", "PipelineDataSource", "AmlDataset",
"GlobalDataset", "FeedModel", "FeedDataset", "AmlDataVersion", "AMLModelVersion".
:vartype data_source_type: str or ~flow.models.DataSourceType
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar data_type_id:
:vartype data_type_id: str
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
:ivar relative_path:
:vartype relative_path: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar modified_date:
:vartype modified_date: ~datetime.datetime
:ivar registered_by:
:vartype registered_by: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar created_by_studio:
:vartype created_by_studio: bool
:ivar data_reference_type: Possible values include: "None", "AzureBlob", "AzureDataLake",
"AzureFiles", "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS",
"AzureMySqlDatabase", "Custom", "Hdfs".
:vartype data_reference_type: str or ~flow.models.DataReferenceType
:ivar dataset_type:
:vartype dataset_type: str
:ivar saved_dataset_id:
:vartype saved_dataset_id: str
:ivar dataset_version_id:
:vartype dataset_version_id: str
:ivar is_visible:
:vartype is_visible: bool
:ivar is_registered:
:vartype is_registered: bool
:ivar properties: This is a dictionary.
:vartype properties: dict[str, any]
:ivar connection_string:
:vartype connection_string: str
:ivar container_name:
:vartype container_name: str
:ivar data_storage_endpoint_uri:
:vartype data_storage_endpoint_uri: str
:ivar workspace_sai_token:
:vartype workspace_sai_token: str
:ivar aml_dataset_data_flow:
:vartype aml_dataset_data_flow: str
:ivar system_data:
:vartype system_data: ~flow.models.SystemData
:ivar arm_id:
:vartype arm_id: str
:ivar asset_id:
:vartype asset_id: str
:ivar asset_uri:
:vartype asset_uri: str
:ivar asset_type:
:vartype asset_type: str
:ivar is_data_v2:
:vartype is_data_v2: bool
:ivar asset_scope_type: Possible values include: "Workspace", "Global", "Feed", "All".
:vartype asset_scope_type: str or ~flow.models.AssetScopeTypes
:ivar pipeline_run_id:
:vartype pipeline_run_id: str
:ivar module_node_id:
:vartype module_node_id: str
:ivar output_port_name:
:vartype output_port_name: str
"""
_attribute_map = {
'feed_name': {'key': 'feedName', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'data_source_type': {'key': 'dataSourceType', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'data_type_id': {'key': 'dataTypeId', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'modified_date': {'key': 'modifiedDate', 'type': 'iso-8601'},
'registered_by': {'key': 'registeredBy', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'created_by_studio': {'key': 'createdByStudio', 'type': 'bool'},
'data_reference_type': {'key': 'dataReferenceType', 'type': 'str'},
'dataset_type': {'key': 'datasetType', 'type': 'str'},
'saved_dataset_id': {'key': 'savedDatasetId', 'type': 'str'},
'dataset_version_id': {'key': 'datasetVersionId', 'type': 'str'},
'is_visible': {'key': 'isVisible', 'type': 'bool'},
'is_registered': {'key': 'isRegistered', 'type': 'bool'},
'properties': {'key': 'properties', 'type': '{object}'},
'connection_string': {'key': 'connectionString', 'type': 'str'},
'container_name': {'key': 'containerName', 'type': 'str'},
'data_storage_endpoint_uri': {'key': 'dataStorageEndpointUri', 'type': 'str'},
'workspace_sai_token': {'key': 'workspaceSaiToken', 'type': 'str'},
'aml_dataset_data_flow': {'key': 'amlDatasetDataFlow', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'arm_id': {'key': 'armId', 'type': 'str'},
'asset_id': {'key': 'assetId', 'type': 'str'},
'asset_uri': {'key': 'assetUri', 'type': 'str'},
'asset_type': {'key': 'assetType', 'type': 'str'},
'is_data_v2': {'key': 'isDataV2', 'type': 'bool'},
'asset_scope_type': {'key': 'assetScopeType', 'type': 'str'},
'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'},
'module_node_id': {'key': 'moduleNodeId', 'type': 'str'},
'output_port_name': {'key': 'outputPortName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword feed_name:
:paramtype feed_name: str
:keyword id:
:paramtype id: str
:keyword data_source_type: Possible values include: "None", "PipelineDataSource", "AmlDataset",
"GlobalDataset", "FeedModel", "FeedDataset", "AmlDataVersion", "AMLModelVersion".
:paramtype data_source_type: str or ~flow.models.DataSourceType
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword data_type_id:
:paramtype data_type_id: str
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
:keyword relative_path:
:paramtype relative_path: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword modified_date:
:paramtype modified_date: ~datetime.datetime
:keyword registered_by:
:paramtype registered_by: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword created_by_studio:
:paramtype created_by_studio: bool
:keyword data_reference_type: Possible values include: "None", "AzureBlob", "AzureDataLake",
"AzureFiles", "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS",
"AzureMySqlDatabase", "Custom", "Hdfs".
:paramtype data_reference_type: str or ~flow.models.DataReferenceType
:keyword dataset_type:
:paramtype dataset_type: str
:keyword saved_dataset_id:
:paramtype saved_dataset_id: str
:keyword dataset_version_id:
:paramtype dataset_version_id: str
:keyword is_visible:
:paramtype is_visible: bool
:keyword is_registered:
:paramtype is_registered: bool
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, any]
:keyword connection_string:
:paramtype connection_string: str
:keyword container_name:
:paramtype container_name: str
:keyword data_storage_endpoint_uri:
:paramtype data_storage_endpoint_uri: str
:keyword workspace_sai_token:
:paramtype workspace_sai_token: str
:keyword aml_dataset_data_flow:
:paramtype aml_dataset_data_flow: str
:keyword system_data:
:paramtype system_data: ~flow.models.SystemData
:keyword arm_id:
:paramtype arm_id: str
:keyword asset_id:
:paramtype asset_id: str
:keyword asset_uri:
:paramtype asset_uri: str
:keyword asset_type:
:paramtype asset_type: str
:keyword is_data_v2:
:paramtype is_data_v2: bool
:keyword asset_scope_type: Possible values include: "Workspace", "Global", "Feed", "All".
:paramtype asset_scope_type: str or ~flow.models.AssetScopeTypes
:keyword pipeline_run_id:
:paramtype pipeline_run_id: str
:keyword module_node_id:
:paramtype module_node_id: str
:keyword output_port_name:
:paramtype output_port_name: str
"""
super(DataInfo, self).__init__(**kwargs)
self.feed_name = kwargs.get('feed_name', None)
self.id = kwargs.get('id', None)
self.data_source_type = kwargs.get('data_source_type', None)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.data_type_id = kwargs.get('data_type_id', None)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
self.relative_path = kwargs.get('relative_path', None)
self.created_date = kwargs.get('created_date', None)
self.modified_date = kwargs.get('modified_date', None)
self.registered_by = kwargs.get('registered_by', None)
self.tags = kwargs.get('tags', None)
self.created_by_studio = kwargs.get('created_by_studio', None)
self.data_reference_type = kwargs.get('data_reference_type', None)
self.dataset_type = kwargs.get('dataset_type', None)
self.saved_dataset_id = kwargs.get('saved_dataset_id', None)
self.dataset_version_id = kwargs.get('dataset_version_id', None)
self.is_visible = kwargs.get('is_visible', None)
self.is_registered = kwargs.get('is_registered', None)
self.properties = kwargs.get('properties', None)
self.connection_string = kwargs.get('connection_string', None)
self.container_name = kwargs.get('container_name', None)
self.data_storage_endpoint_uri = kwargs.get('data_storage_endpoint_uri', None)
self.workspace_sai_token = kwargs.get('workspace_sai_token', None)
self.aml_dataset_data_flow = kwargs.get('aml_dataset_data_flow', None)
self.system_data = kwargs.get('system_data', None)
self.arm_id = kwargs.get('arm_id', None)
self.asset_id = kwargs.get('asset_id', None)
self.asset_uri = kwargs.get('asset_uri', None)
self.asset_type = kwargs.get('asset_type', None)
self.is_data_v2 = kwargs.get('is_data_v2', None)
self.asset_scope_type = kwargs.get('asset_scope_type', None)
self.pipeline_run_id = kwargs.get('pipeline_run_id', None)
self.module_node_id = kwargs.get('module_node_id', None)
self.output_port_name = kwargs.get('output_port_name', None)
class DataLocation(msrest.serialization.Model):
"""DataLocation.
:ivar storage_type: Possible values include: "None", "AzureBlob", "Artifact", "Snapshot",
"SavedAmlDataset", "Asset".
:vartype storage_type: str or ~flow.models.DataLocationStorageType
:ivar storage_id:
:vartype storage_id: str
:ivar uri:
:vartype uri: str
:ivar data_store_name:
:vartype data_store_name: str
:ivar data_reference:
:vartype data_reference: ~flow.models.DataReference
:ivar aml_dataset:
:vartype aml_dataset: ~flow.models.AmlDataset
:ivar asset_definition:
:vartype asset_definition: ~flow.models.AssetDefinition
"""
_attribute_map = {
'storage_type': {'key': 'storageType', 'type': 'str'},
'storage_id': {'key': 'storageId', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'data_reference': {'key': 'dataReference', 'type': 'DataReference'},
'aml_dataset': {'key': 'amlDataset', 'type': 'AmlDataset'},
'asset_definition': {'key': 'assetDefinition', 'type': 'AssetDefinition'},
}
def __init__(
self,
**kwargs
):
"""
:keyword storage_type: Possible values include: "None", "AzureBlob", "Artifact", "Snapshot",
"SavedAmlDataset", "Asset".
:paramtype storage_type: str or ~flow.models.DataLocationStorageType
:keyword storage_id:
:paramtype storage_id: str
:keyword uri:
:paramtype uri: str
:keyword data_store_name:
:paramtype data_store_name: str
:keyword data_reference:
:paramtype data_reference: ~flow.models.DataReference
:keyword aml_dataset:
:paramtype aml_dataset: ~flow.models.AmlDataset
:keyword asset_definition:
:paramtype asset_definition: ~flow.models.AssetDefinition
"""
super(DataLocation, self).__init__(**kwargs)
self.storage_type = kwargs.get('storage_type', None)
self.storage_id = kwargs.get('storage_id', None)
self.uri = kwargs.get('uri', None)
self.data_store_name = kwargs.get('data_store_name', None)
self.data_reference = kwargs.get('data_reference', None)
self.aml_dataset = kwargs.get('aml_dataset', None)
self.asset_definition = kwargs.get('asset_definition', None)
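

# --- Illustrative usage (hand-written sketch, not AutoRest-generated) ---
# A minimal example of describing a URI-addressed blob location. The URI and
# datastore name below are placeholders, not values defined by this SDK.
def _example_data_location():
    """Hedged sketch: a DataLocation discriminated by ``storage_type``."""
    return DataLocation(
        storage_type='AzureBlob',
        uri='azureml://datastores/workspaceblobstore/paths/raw/',
        data_store_name='workspaceblobstore',
    )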


class DataPath(msrest.serialization.Model):
"""DataPath.
:ivar data_store_name:
:vartype data_store_name: str
:ivar relative_path:
:vartype relative_path: str
:ivar sql_data_path:
:vartype sql_data_path: ~flow.models.SqlDataPath
"""
_attribute_map = {
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'sql_data_path': {'key': 'sqlDataPath', 'type': 'SqlDataPath'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_store_name:
:paramtype data_store_name: str
:keyword relative_path:
:paramtype relative_path: str
:keyword sql_data_path:
:paramtype sql_data_path: ~flow.models.SqlDataPath
"""
super(DataPath, self).__init__(**kwargs)
self.data_store_name = kwargs.get('data_store_name', None)
self.relative_path = kwargs.get('relative_path', None)
self.sql_data_path = kwargs.get('sql_data_path', None)
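

# --- Illustrative usage (hand-written sketch, not AutoRest-generated) ---
# The kwargs-based constructor pattern used throughout this module: pass the
# documented keywords, and anything omitted stays None. The datastore name
# and relative path below are placeholders.
def _example_data_path():
    """Hedged sketch: a DataPath for a file relative to a blob datastore."""
    return DataPath(
        data_store_name='workspaceblobstore',
        relative_path='datasets/iris.csv',
    )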


class DataPathParameter(msrest.serialization.Model):
"""DataPathParameter.
:ivar name:
:vartype name: str
:ivar documentation:
:vartype documentation: str
:ivar default_value:
:vartype default_value: ~flow.models.LegacyDataPath
:ivar is_optional:
:vartype is_optional: bool
:ivar data_type_id:
:vartype data_type_id: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'documentation': {'key': 'documentation', 'type': 'str'},
'default_value': {'key': 'defaultValue', 'type': 'LegacyDataPath'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
'data_type_id': {'key': 'dataTypeId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword documentation:
:paramtype documentation: str
:keyword default_value:
:paramtype default_value: ~flow.models.LegacyDataPath
:keyword is_optional:
:paramtype is_optional: bool
:keyword data_type_id:
:paramtype data_type_id: str
"""
super(DataPathParameter, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.documentation = kwargs.get('documentation', None)
self.default_value = kwargs.get('default_value', None)
self.is_optional = kwargs.get('is_optional', None)
self.data_type_id = kwargs.get('data_type_id', None)


class DataPortDto(msrest.serialization.Model):
"""DataPortDto.
:ivar data_port_type: Possible values include: "Input", "Output".
:vartype data_port_type: str or ~flow.models.DataPortType
:ivar data_port_name:
:vartype data_port_name: str
:ivar data_store_name:
:vartype data_store_name: str
:ivar data_store_intellectual_property_access_mode: Possible values include: "ReadOnly",
"ReadWrite".
:vartype data_store_intellectual_property_access_mode: str or
~flow.models.IntellectualPropertyAccessMode
:ivar data_store_intellectual_property_publisher:
:vartype data_store_intellectual_property_publisher: str
"""
_attribute_map = {
'data_port_type': {'key': 'dataPortType', 'type': 'str'},
'data_port_name': {'key': 'dataPortName', 'type': 'str'},
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'data_store_intellectual_property_access_mode': {'key': 'dataStoreIntellectualPropertyAccessMode', 'type': 'str'},
'data_store_intellectual_property_publisher': {'key': 'dataStoreIntellectualPropertyPublisher', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_port_type: Possible values include: "Input", "Output".
:paramtype data_port_type: str or ~flow.models.DataPortType
:keyword data_port_name:
:paramtype data_port_name: str
:keyword data_store_name:
:paramtype data_store_name: str
:keyword data_store_intellectual_property_access_mode: Possible values include: "ReadOnly",
"ReadWrite".
:paramtype data_store_intellectual_property_access_mode: str or
~flow.models.IntellectualPropertyAccessMode
:keyword data_store_intellectual_property_publisher:
:paramtype data_store_intellectual_property_publisher: str
"""
super(DataPortDto, self).__init__(**kwargs)
self.data_port_type = kwargs.get('data_port_type', None)
self.data_port_name = kwargs.get('data_port_name', None)
self.data_store_name = kwargs.get('data_store_name', None)
self.data_store_intellectual_property_access_mode = kwargs.get('data_store_intellectual_property_access_mode', None)
self.data_store_intellectual_property_publisher = kwargs.get('data_store_intellectual_property_publisher', None)


class DataReference(msrest.serialization.Model):
"""DataReference.
:ivar type: Possible values include: "None", "AzureBlob", "AzureDataLake", "AzureFiles",
"AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS", "AzureMySqlDatabase",
"Custom", "Hdfs".
:vartype type: str or ~flow.models.DataReferenceType
:ivar azure_blob_reference:
:vartype azure_blob_reference: ~flow.models.AzureBlobReference
:ivar azure_data_lake_reference:
:vartype azure_data_lake_reference: ~flow.models.AzureDataLakeReference
:ivar azure_files_reference:
:vartype azure_files_reference: ~flow.models.AzureFilesReference
:ivar azure_sql_database_reference:
:vartype azure_sql_database_reference: ~flow.models.AzureDatabaseReference
:ivar azure_postgres_database_reference:
:vartype azure_postgres_database_reference: ~flow.models.AzureDatabaseReference
:ivar azure_data_lake_gen2_reference:
:vartype azure_data_lake_gen2_reference: ~flow.models.AzureDataLakeGen2Reference
:ivar dbfs_reference:
:vartype dbfs_reference: ~flow.models.DBFSReference
:ivar azure_my_sql_database_reference:
:vartype azure_my_sql_database_reference: ~flow.models.AzureDatabaseReference
:ivar custom_reference:
:vartype custom_reference: ~flow.models.CustomReference
:ivar hdfs_reference:
:vartype hdfs_reference: ~flow.models.HdfsReference
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'azure_blob_reference': {'key': 'azureBlobReference', 'type': 'AzureBlobReference'},
'azure_data_lake_reference': {'key': 'azureDataLakeReference', 'type': 'AzureDataLakeReference'},
'azure_files_reference': {'key': 'azureFilesReference', 'type': 'AzureFilesReference'},
'azure_sql_database_reference': {'key': 'azureSqlDatabaseReference', 'type': 'AzureDatabaseReference'},
'azure_postgres_database_reference': {'key': 'azurePostgresDatabaseReference', 'type': 'AzureDatabaseReference'},
'azure_data_lake_gen2_reference': {'key': 'azureDataLakeGen2Reference', 'type': 'AzureDataLakeGen2Reference'},
'dbfs_reference': {'key': 'dbfsReference', 'type': 'DBFSReference'},
'azure_my_sql_database_reference': {'key': 'azureMySqlDatabaseReference', 'type': 'AzureDatabaseReference'},
'custom_reference': {'key': 'customReference', 'type': 'CustomReference'},
'hdfs_reference': {'key': 'hdfsReference', 'type': 'HdfsReference'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: Possible values include: "None", "AzureBlob", "AzureDataLake", "AzureFiles",
"AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS", "AzureMySqlDatabase",
"Custom", "Hdfs".
:paramtype type: str or ~flow.models.DataReferenceType
:keyword azure_blob_reference:
:paramtype azure_blob_reference: ~flow.models.AzureBlobReference
:keyword azure_data_lake_reference:
:paramtype azure_data_lake_reference: ~flow.models.AzureDataLakeReference
:keyword azure_files_reference:
:paramtype azure_files_reference: ~flow.models.AzureFilesReference
:keyword azure_sql_database_reference:
:paramtype azure_sql_database_reference: ~flow.models.AzureDatabaseReference
:keyword azure_postgres_database_reference:
:paramtype azure_postgres_database_reference: ~flow.models.AzureDatabaseReference
:keyword azure_data_lake_gen2_reference:
:paramtype azure_data_lake_gen2_reference: ~flow.models.AzureDataLakeGen2Reference
:keyword dbfs_reference:
:paramtype dbfs_reference: ~flow.models.DBFSReference
:keyword azure_my_sql_database_reference:
:paramtype azure_my_sql_database_reference: ~flow.models.AzureDatabaseReference
:keyword custom_reference:
:paramtype custom_reference: ~flow.models.CustomReference
:keyword hdfs_reference:
:paramtype hdfs_reference: ~flow.models.HdfsReference
"""
super(DataReference, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.azure_blob_reference = kwargs.get('azure_blob_reference', None)
self.azure_data_lake_reference = kwargs.get('azure_data_lake_reference', None)
self.azure_files_reference = kwargs.get('azure_files_reference', None)
self.azure_sql_database_reference = kwargs.get('azure_sql_database_reference', None)
self.azure_postgres_database_reference = kwargs.get('azure_postgres_database_reference', None)
self.azure_data_lake_gen2_reference = kwargs.get('azure_data_lake_gen2_reference', None)
self.dbfs_reference = kwargs.get('dbfs_reference', None)
self.azure_my_sql_database_reference = kwargs.get('azure_my_sql_database_reference', None)
self.custom_reference = kwargs.get('custom_reference', None)
self.hdfs_reference = kwargs.get('hdfs_reference', None)
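

# --- Illustrative usage (hand-written sketch, not AutoRest-generated) ---
# DataReference behaves like a tagged union: ``type`` selects which of the
# typed reference fields is meaningful. For 'AzureBlob' that is
# ``azure_blob_reference`` (an AzureBlobReference, defined earlier in this
# module); it is left unset here to avoid assuming its exact keywords.
def _example_data_reference():
    """Hedged sketch: a DataReference discriminated by its ``type`` field."""
    return DataReference(type='AzureBlob')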


class DataReferenceConfiguration(msrest.serialization.Model):
"""DataReferenceConfiguration.
:ivar data_store_name:
:vartype data_store_name: str
:ivar mode: Possible values include: "Mount", "Download", "Upload".
:vartype mode: str or ~flow.models.DataStoreMode
:ivar path_on_data_store:
:vartype path_on_data_store: str
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar overwrite:
:vartype overwrite: bool
"""
_attribute_map = {
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'mode': {'key': 'mode', 'type': 'str'},
'path_on_data_store': {'key': 'pathOnDataStore', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'overwrite': {'key': 'overwrite', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_store_name:
:paramtype data_store_name: str
:keyword mode: Possible values include: "Mount", "Download", "Upload".
:paramtype mode: str or ~flow.models.DataStoreMode
:keyword path_on_data_store:
:paramtype path_on_data_store: str
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword overwrite:
:paramtype overwrite: bool
"""
super(DataReferenceConfiguration, self).__init__(**kwargs)
self.data_store_name = kwargs.get('data_store_name', None)
self.mode = kwargs.get('mode', None)
self.path_on_data_store = kwargs.get('path_on_data_store', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
self.overwrite = kwargs.get('overwrite', None)
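

# --- Illustrative usage (hand-written sketch, not AutoRest-generated) ---
# Mounting is one of the three documented delivery modes (Mount, Download,
# Upload). The datastore name and path below are placeholders.
def _example_mount_configuration():
    """Hedged sketch: mount a datastore folder onto the compute target."""
    return DataReferenceConfiguration(
        data_store_name='workspaceblobstore',
        mode='Mount',
        path_on_data_store='inputs/images',
        overwrite=False,
    )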


class DataSetDefinition(msrest.serialization.Model):
"""DataSetDefinition.
:ivar data_type_short_name:
:vartype data_type_short_name: str
:ivar parameter_name:
:vartype parameter_name: str
:ivar value:
:vartype value: ~flow.models.DataSetDefinitionValue
"""
_attribute_map = {
'data_type_short_name': {'key': 'dataTypeShortName', 'type': 'str'},
'parameter_name': {'key': 'parameterName', 'type': 'str'},
'value': {'key': 'value', 'type': 'DataSetDefinitionValue'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_type_short_name:
:paramtype data_type_short_name: str
:keyword parameter_name:
:paramtype parameter_name: str
:keyword value:
:paramtype value: ~flow.models.DataSetDefinitionValue
"""
super(DataSetDefinition, self).__init__(**kwargs)
self.data_type_short_name = kwargs.get('data_type_short_name', None)
self.parameter_name = kwargs.get('parameter_name', None)
self.value = kwargs.get('value', None)


class DataSetDefinitionValue(msrest.serialization.Model):
"""DataSetDefinitionValue.
:ivar literal_value:
:vartype literal_value: ~flow.models.DataPath
:ivar data_set_reference:
:vartype data_set_reference: ~flow.models.RegisteredDataSetReference
:ivar saved_data_set_reference:
:vartype saved_data_set_reference: ~flow.models.SavedDataSetReference
:ivar asset_definition:
:vartype asset_definition: ~flow.models.AssetDefinition
"""
_attribute_map = {
'literal_value': {'key': 'literalValue', 'type': 'DataPath'},
'data_set_reference': {'key': 'dataSetReference', 'type': 'RegisteredDataSetReference'},
'saved_data_set_reference': {'key': 'savedDataSetReference', 'type': 'SavedDataSetReference'},
'asset_definition': {'key': 'assetDefinition', 'type': 'AssetDefinition'},
}
def __init__(
self,
**kwargs
):
"""
:keyword literal_value:
:paramtype literal_value: ~flow.models.DataPath
:keyword data_set_reference:
:paramtype data_set_reference: ~flow.models.RegisteredDataSetReference
:keyword saved_data_set_reference:
:paramtype saved_data_set_reference: ~flow.models.SavedDataSetReference
:keyword asset_definition:
:paramtype asset_definition: ~flow.models.AssetDefinition
"""
super(DataSetDefinitionValue, self).__init__(**kwargs)
self.literal_value = kwargs.get('literal_value', None)
self.data_set_reference = kwargs.get('data_set_reference', None)
self.saved_data_set_reference = kwargs.get('saved_data_set_reference', None)
self.asset_definition = kwargs.get('asset_definition', None)
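

# --- Illustrative usage (hand-written sketch, not AutoRest-generated) ---
# A DataSetDefinitionValue typically carries only one of its alternative
# values; a literal DataPath is the simplest. The datastore name and path
# below are placeholders.
def _example_data_set_definition_value():
    """Hedged sketch: a literal DataPath as a data set definition value."""
    return DataSetDefinitionValue(
        literal_value=DataPath(
            data_store_name='workspaceblobstore',
            relative_path='training/data.parquet',
        ),
    )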


class DatasetIdentifier(msrest.serialization.Model):
"""DatasetIdentifier.
:ivar saved_id:
:vartype saved_id: str
:ivar registered_id:
:vartype registered_id: str
:ivar registered_version:
:vartype registered_version: str
"""
_attribute_map = {
'saved_id': {'key': 'savedId', 'type': 'str'},
'registered_id': {'key': 'registeredId', 'type': 'str'},
'registered_version': {'key': 'registeredVersion', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword saved_id:
:paramtype saved_id: str
:keyword registered_id:
:paramtype registered_id: str
:keyword registered_version:
:paramtype registered_version: str
"""
super(DatasetIdentifier, self).__init__(**kwargs)
self.saved_id = kwargs.get('saved_id', None)
self.registered_id = kwargs.get('registered_id', None)
self.registered_version = kwargs.get('registered_version', None)


class DatasetInputDetails(msrest.serialization.Model):
"""DatasetInputDetails.
:ivar input_name:
:vartype input_name: str
:ivar mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs".
:vartype mechanism: str or ~flow.models.DatasetDeliveryMechanism
:ivar path_on_compute:
:vartype path_on_compute: str
"""
_attribute_map = {
'input_name': {'key': 'inputName', 'type': 'str'},
'mechanism': {'key': 'mechanism', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword input_name:
:paramtype input_name: str
:keyword mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs".
:paramtype mechanism: str or ~flow.models.DatasetDeliveryMechanism
:keyword path_on_compute:
:paramtype path_on_compute: str
"""
super(DatasetInputDetails, self).__init__(**kwargs)
self.input_name = kwargs.get('input_name', None)
self.mechanism = kwargs.get('mechanism', None)
self.path_on_compute = kwargs.get('path_on_compute', None)


class DatasetLineage(msrest.serialization.Model):
"""DatasetLineage.
:ivar identifier:
:vartype identifier: ~flow.models.DatasetIdentifier
:ivar consumption_type: Possible values include: "RunInput", "Reference".
:vartype consumption_type: str or ~flow.models.DatasetConsumptionType
:ivar input_details:
:vartype input_details: ~flow.models.DatasetInputDetails
"""
_attribute_map = {
'identifier': {'key': 'identifier', 'type': 'DatasetIdentifier'},
'consumption_type': {'key': 'consumptionType', 'type': 'str'},
'input_details': {'key': 'inputDetails', 'type': 'DatasetInputDetails'},
}
def __init__(
self,
**kwargs
):
"""
:keyword identifier:
:paramtype identifier: ~flow.models.DatasetIdentifier
:keyword consumption_type: Possible values include: "RunInput", "Reference".
:paramtype consumption_type: str or ~flow.models.DatasetConsumptionType
:keyword input_details:
:paramtype input_details: ~flow.models.DatasetInputDetails
"""
super(DatasetLineage, self).__init__(**kwargs)
self.identifier = kwargs.get('identifier', None)
self.consumption_type = kwargs.get('consumption_type', None)
self.input_details = kwargs.get('input_details', None)
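

# --- Illustrative usage (hand-written sketch, not AutoRest-generated) ---
# Lineage ties a dataset identifier to how the run consumed it. The dataset
# name, version, and input name below are placeholders.
def _example_dataset_lineage():
    """Hedged sketch: lineage for a registered dataset mounted as a run input."""
    return DatasetLineage(
        identifier=DatasetIdentifier(registered_id='my-dataset', registered_version='1'),
        consumption_type='RunInput',
        input_details=DatasetInputDetails(input_name='training_data', mechanism='Mount'),
    )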


class DatasetOutput(msrest.serialization.Model):
"""DatasetOutput.
:ivar dataset_type: Possible values include: "File", "Tabular".
:vartype dataset_type: str or ~flow.models.DatasetType
:ivar dataset_registration:
:vartype dataset_registration: ~flow.models.DatasetRegistration
:ivar dataset_output_options:
:vartype dataset_output_options: ~flow.models.DatasetOutputOptions
"""
_attribute_map = {
'dataset_type': {'key': 'datasetType', 'type': 'str'},
'dataset_registration': {'key': 'datasetRegistration', 'type': 'DatasetRegistration'},
'dataset_output_options': {'key': 'datasetOutputOptions', 'type': 'DatasetOutputOptions'},
}
def __init__(
self,
**kwargs
):
"""
:keyword dataset_type: Possible values include: "File", "Tabular".
:paramtype dataset_type: str or ~flow.models.DatasetType
:keyword dataset_registration:
:paramtype dataset_registration: ~flow.models.DatasetRegistration
:keyword dataset_output_options:
:paramtype dataset_output_options: ~flow.models.DatasetOutputOptions
"""
super(DatasetOutput, self).__init__(**kwargs)
self.dataset_type = kwargs.get('dataset_type', None)
self.dataset_registration = kwargs.get('dataset_registration', None)
self.dataset_output_options = kwargs.get('dataset_output_options', None)


class DatasetOutputDetails(msrest.serialization.Model):
"""DatasetOutputDetails.
:ivar output_name:
:vartype output_name: str
"""
_attribute_map = {
'output_name': {'key': 'outputName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword output_name:
:paramtype output_name: str
"""
super(DatasetOutputDetails, self).__init__(**kwargs)
self.output_name = kwargs.get('output_name', None)


class DatasetOutputOptions(msrest.serialization.Model):
"""DatasetOutputOptions.
:ivar source_globs:
:vartype source_globs: ~flow.models.GlobsOptions
:ivar path_on_datastore:
:vartype path_on_datastore: str
:ivar path_on_datastore_parameter_assignment:
:vartype path_on_datastore_parameter_assignment: ~flow.models.ParameterAssignment
"""
_attribute_map = {
'source_globs': {'key': 'sourceGlobs', 'type': 'GlobsOptions'},
'path_on_datastore': {'key': 'pathOnDatastore', 'type': 'str'},
'path_on_datastore_parameter_assignment': {'key': 'PathOnDatastoreParameterAssignment', 'type': 'ParameterAssignment'},
}
def __init__(
self,
**kwargs
):
"""
:keyword source_globs:
:paramtype source_globs: ~flow.models.GlobsOptions
:keyword path_on_datastore:
:paramtype path_on_datastore: str
:keyword path_on_datastore_parameter_assignment:
:paramtype path_on_datastore_parameter_assignment: ~flow.models.ParameterAssignment
"""
super(DatasetOutputOptions, self).__init__(**kwargs)
self.source_globs = kwargs.get('source_globs', None)
self.path_on_datastore = kwargs.get('path_on_datastore', None)
self.path_on_datastore_parameter_assignment = kwargs.get('path_on_datastore_parameter_assignment', None)


class DataSetPathParameter(msrest.serialization.Model):
"""DataSetPathParameter.
:ivar name:
:vartype name: str
:ivar documentation:
:vartype documentation: str
:ivar default_value:
:vartype default_value: ~flow.models.DataSetDefinitionValue
:ivar is_optional:
:vartype is_optional: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'documentation': {'key': 'documentation', 'type': 'str'},
'default_value': {'key': 'defaultValue', 'type': 'DataSetDefinitionValue'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword documentation:
:paramtype documentation: str
:keyword default_value:
:paramtype default_value: ~flow.models.DataSetDefinitionValue
:keyword is_optional:
:paramtype is_optional: bool
"""
super(DataSetPathParameter, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.documentation = kwargs.get('documentation', None)
self.default_value = kwargs.get('default_value', None)
self.is_optional = kwargs.get('is_optional', None)


class DatasetRegistration(msrest.serialization.Model):
"""DatasetRegistration.
:ivar name:
:vartype name: str
:ivar create_new_version:
:vartype create_new_version: bool
:ivar description:
:vartype description: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar additional_transformations:
:vartype additional_transformations: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'create_new_version': {'key': 'createNewVersion', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword create_new_version:
:paramtype create_new_version: bool
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword additional_transformations:
:paramtype additional_transformations: str
"""
super(DatasetRegistration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.create_new_version = kwargs.get('create_new_version', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.additional_transformations = kwargs.get('additional_transformations', None)
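

# --- Illustrative usage (hand-written sketch, not AutoRest-generated) ---
# create_new_version is the flag for versioning an already-registered name;
# its exact service-side semantics are not documented here, so treat this as
# a sketch. All concrete values below are placeholders.
def _example_dataset_registration():
    """Hedged sketch: register (or re-version) a dataset with tags."""
    return DatasetRegistration(
        name='cleaned-sales-data',
        create_new_version=True,
        description='Sales data after deduplication.',
        tags={'stage': 'cleaned'},
    )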


class DatasetRegistrationOptions(msrest.serialization.Model):
"""DatasetRegistrationOptions.
:ivar additional_transformation:
:vartype additional_transformation: str
"""
_attribute_map = {
'additional_transformation': {'key': 'additionalTransformation', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword additional_transformation:
:paramtype additional_transformation: str
"""
super(DatasetRegistrationOptions, self).__init__(**kwargs)
self.additional_transformation = kwargs.get('additional_transformation', None)


class DataSettings(msrest.serialization.Model):
"""DataSettings.
:ivar target_column_name:
:vartype target_column_name: str
:ivar weight_column_name:
:vartype weight_column_name: str
:ivar positive_label:
:vartype positive_label: str
:ivar validation_data:
:vartype validation_data: ~flow.models.ValidationDataSettings
:ivar test_data:
:vartype test_data: ~flow.models.TestDataSettings
"""
_attribute_map = {
'target_column_name': {'key': 'targetColumnName', 'type': 'str'},
'weight_column_name': {'key': 'weightColumnName', 'type': 'str'},
'positive_label': {'key': 'positiveLabel', 'type': 'str'},
'validation_data': {'key': 'validationData', 'type': 'ValidationDataSettings'},
'test_data': {'key': 'testData', 'type': 'TestDataSettings'},
}
def __init__(
self,
**kwargs
):
"""
:keyword target_column_name:
:paramtype target_column_name: str
:keyword weight_column_name:
:paramtype weight_column_name: str
:keyword positive_label:
:paramtype positive_label: str
:keyword validation_data:
:paramtype validation_data: ~flow.models.ValidationDataSettings
:keyword test_data:
:paramtype test_data: ~flow.models.TestDataSettings
"""
super(DataSettings, self).__init__(**kwargs)
self.target_column_name = kwargs.get('target_column_name', None)
self.weight_column_name = kwargs.get('weight_column_name', None)
self.positive_label = kwargs.get('positive_label', None)
self.validation_data = kwargs.get('validation_data', None)
self.test_data = kwargs.get('test_data', None)


class DatastoreSetting(msrest.serialization.Model):
"""DatastoreSetting.
:ivar data_store_name:
:vartype data_store_name: str
"""
_attribute_map = {
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_store_name:
:paramtype data_store_name: str
"""
super(DatastoreSetting, self).__init__(**kwargs)
self.data_store_name = kwargs.get('data_store_name', None)


class DataTransferCloudConfiguration(msrest.serialization.Model):
"""DataTransferCloudConfiguration.
:ivar allow_overwrite:
:vartype allow_overwrite: bool
"""
_attribute_map = {
'allow_overwrite': {'key': 'AllowOverwrite', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword allow_overwrite:
:paramtype allow_overwrite: bool
"""
super(DataTransferCloudConfiguration, self).__init__(**kwargs)
self.allow_overwrite = kwargs.get('allow_overwrite', None)


class DataTransferSink(msrest.serialization.Model):
"""DataTransferSink.
:ivar type: Possible values include: "DataBase", "FileSystem".
:vartype type: str or ~flow.models.DataTransferStorageType
:ivar file_system:
:vartype file_system: ~flow.models.FileSystem
:ivar database_sink:
:vartype database_sink: ~flow.models.DatabaseSink
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'file_system': {'key': 'fileSystem', 'type': 'FileSystem'},
'database_sink': {'key': 'databaseSink', 'type': 'DatabaseSink'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: Possible values include: "DataBase", "FileSystem".
:paramtype type: str or ~flow.models.DataTransferStorageType
:keyword file_system:
:paramtype file_system: ~flow.models.FileSystem
:keyword database_sink:
:paramtype database_sink: ~flow.models.DatabaseSink
"""
super(DataTransferSink, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.file_system = kwargs.get('file_system', None)
self.database_sink = kwargs.get('database_sink', None)


class DataTransferSource(msrest.serialization.Model):
"""DataTransferSource.
:ivar type: Possible values include: "DataBase", "FileSystem".
:vartype type: str or ~flow.models.DataTransferStorageType
:ivar file_system:
:vartype file_system: ~flow.models.FileSystem
:ivar database_source:
:vartype database_source: ~flow.models.DatabaseSource
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'file_system': {'key': 'fileSystem', 'type': 'FileSystem'},
'database_source': {'key': 'databaseSource', 'type': 'DatabaseSource'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: Possible values include: "DataBase", "FileSystem".
:paramtype type: str or ~flow.models.DataTransferStorageType
:keyword file_system:
:paramtype file_system: ~flow.models.FileSystem
:keyword database_source:
:paramtype database_source: ~flow.models.DatabaseSource
"""
super(DataTransferSource, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.file_system = kwargs.get('file_system', None)
self.database_source = kwargs.get('database_source', None)


class DataTransferV2CloudSetting(msrest.serialization.Model):
"""DataTransferV2CloudSetting.
:ivar task_type: Possible values include: "ImportData", "ExportData", "CopyData".
:vartype task_type: str or ~flow.models.DataTransferTaskType
:ivar compute_name:
:vartype compute_name: str
:ivar copy_data_task:
:vartype copy_data_task: ~flow.models.CopyDataTask
:ivar import_data_task:
:vartype import_data_task: ~flow.models.ImportDataTask
:ivar export_data_task:
:vartype export_data_task: ~flow.models.ExportDataTask
:ivar data_transfer_sources: This is a dictionary.
:vartype data_transfer_sources: dict[str, ~flow.models.DataTransferSource]
:ivar data_transfer_sinks: This is a dictionary.
:vartype data_transfer_sinks: dict[str, ~flow.models.DataTransferSink]
:ivar data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
:vartype data_copy_mode: str or ~flow.models.DataCopyMode
"""
_attribute_map = {
'task_type': {'key': 'taskType', 'type': 'str'},
'compute_name': {'key': 'ComputeName', 'type': 'str'},
'copy_data_task': {'key': 'CopyDataTask', 'type': 'CopyDataTask'},
'import_data_task': {'key': 'ImportDataTask', 'type': 'ImportDataTask'},
'export_data_task': {'key': 'ExportDataTask', 'type': 'ExportDataTask'},
'data_transfer_sources': {'key': 'DataTransferSources', 'type': '{DataTransferSource}'},
'data_transfer_sinks': {'key': 'DataTransferSinks', 'type': '{DataTransferSink}'},
'data_copy_mode': {'key': 'DataCopyMode', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword task_type: Possible values include: "ImportData", "ExportData", "CopyData".
:paramtype task_type: str or ~flow.models.DataTransferTaskType
:keyword compute_name:
:paramtype compute_name: str
:keyword copy_data_task:
:paramtype copy_data_task: ~flow.models.CopyDataTask
:keyword import_data_task:
:paramtype import_data_task: ~flow.models.ImportDataTask
:keyword export_data_task:
:paramtype export_data_task: ~flow.models.ExportDataTask
:keyword data_transfer_sources: This is a dictionary.
:paramtype data_transfer_sources: dict[str, ~flow.models.DataTransferSource]
:keyword data_transfer_sinks: This is a dictionary.
:paramtype data_transfer_sinks: dict[str, ~flow.models.DataTransferSink]
:keyword data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
:paramtype data_copy_mode: str or ~flow.models.DataCopyMode
"""
super(DataTransferV2CloudSetting, self).__init__(**kwargs)
self.task_type = kwargs.get('task_type', None)
self.compute_name = kwargs.get('compute_name', None)
self.copy_data_task = kwargs.get('copy_data_task', None)
self.import_data_task = kwargs.get('import_data_task', None)
self.export_data_task = kwargs.get('export_data_task', None)
self.data_transfer_sources = kwargs.get('data_transfer_sources', None)
self.data_transfer_sinks = kwargs.get('data_transfer_sinks', None)
self.data_copy_mode = kwargs.get('data_copy_mode', None)
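

# --- Illustrative usage (hand-written sketch, not AutoRest-generated) ---
# A CopyData task names its sources and sinks through dictionaries whose keys
# appear to be free-form labels. The compute name and the 'source'/'sink'
# keys below are placeholders.
def _example_copy_data_setting():
    """Hedged sketch: a CopyData task wiring one file-system source to one sink."""
    return DataTransferV2CloudSetting(
        task_type='CopyData',
        compute_name='data-transfer-compute',
        data_transfer_sources={'source': DataTransferSource(type='FileSystem')},
        data_transfer_sinks={'sink': DataTransferSink(type='FileSystem')},
        data_copy_mode='MergeWithOverwrite',
    )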


class DataTypeCreationInfo(msrest.serialization.Model):
"""DataTypeCreationInfo.
:ivar id:
:vartype id: str
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar is_directory:
:vartype is_directory: bool
:ivar file_extension:
:vartype file_extension: str
:ivar parent_data_type_ids:
:vartype parent_data_type_ids: list[str]
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'is_directory': {'key': 'isDirectory', 'type': 'bool'},
'file_extension': {'key': 'fileExtension', 'type': 'str'},
'parent_data_type_ids': {'key': 'parentDataTypeIds', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword is_directory:
:paramtype is_directory: bool
:keyword file_extension:
:paramtype file_extension: str
:keyword parent_data_type_ids:
:paramtype parent_data_type_ids: list[str]
"""
super(DataTypeCreationInfo, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.is_directory = kwargs.get('is_directory', None)
self.file_extension = kwargs.get('file_extension', None)
self.parent_data_type_ids = kwargs.get('parent_data_type_ids', None)


class DBFSReference(msrest.serialization.Model):
"""DBFSReference.
:ivar relative_path:
:vartype relative_path: str
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'relative_path': {'key': 'relativePath', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword relative_path:
:paramtype relative_path: str
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(DBFSReference, self).__init__(**kwargs)
self.relative_path = kwargs.get('relative_path', None)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)


class DbfsStorageInfoDto(msrest.serialization.Model):
"""DbfsStorageInfoDto.
:ivar destination:
:vartype destination: str
"""
_attribute_map = {
'destination': {'key': 'destination', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword destination:
:paramtype destination: str
"""
super(DbfsStorageInfoDto, self).__init__(**kwargs)
self.destination = kwargs.get('destination', None)


class DebugInfoResponse(msrest.serialization.Model):
"""Internal debugging information not intended for external clients.
:ivar type: The type.
:vartype type: str
:ivar message: The message.
:vartype message: str
:ivar stack_trace: The stack trace.
:vartype stack_trace: str
:ivar inner_exception: Internal debugging information not intended for external clients.
:vartype inner_exception: ~flow.models.DebugInfoResponse
:ivar data: This is a dictionary.
:vartype data: dict[str, any]
:ivar error_response: The error response.
:vartype error_response: ~flow.models.ErrorResponse
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'stack_trace': {'key': 'stackTrace', 'type': 'str'},
'inner_exception': {'key': 'innerException', 'type': 'DebugInfoResponse'},
'data': {'key': 'data', 'type': '{object}'},
'error_response': {'key': 'errorResponse', 'type': 'ErrorResponse'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: The type.
:paramtype type: str
:keyword message: The message.
:paramtype message: str
:keyword stack_trace: The stack trace.
:paramtype stack_trace: str
:keyword inner_exception: Internal debugging information not intended for external clients.
:paramtype inner_exception: ~flow.models.DebugInfoResponse
:keyword data: This is a dictionary.
:paramtype data: dict[str, any]
:keyword error_response: The error response.
:paramtype error_response: ~flow.models.ErrorResponse
"""
super(DebugInfoResponse, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.message = kwargs.get('message', None)
self.stack_trace = kwargs.get('stack_trace', None)
self.inner_exception = kwargs.get('inner_exception', None)
self.data = kwargs.get('data', None)
self.error_response = kwargs.get('error_response', None)
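

# --- Illustrative usage (hand-written sketch, not AutoRest-generated) ---
# inner_exception nests another DebugInfoResponse, so a chain of causes can
# be flattened with a simple loop.
def _example_flatten_debug_info(debug_info):
    """Hedged sketch: collect the messages along an inner-exception chain."""
    messages = []
    current = debug_info
    while current is not None:
        messages.append(current.message)
        current = current.inner_exception
    return messages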


class DeployFlowRequest(msrest.serialization.Model):
"""DeployFlowRequest.
:ivar source_resource_id:
:vartype source_resource_id: str
:ivar source_flow_run_id:
:vartype source_flow_run_id: str
:ivar source_flow_id:
:vartype source_flow_id: str
:ivar flow:
:vartype flow: ~flow.models.Flow
:ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType
:ivar flow_submit_run_settings:
:vartype flow_submit_run_settings: ~flow.models.FlowSubmitRunSettings
:ivar output_names_included_in_endpoint_response:
:vartype output_names_included_in_endpoint_response: list[str]
:ivar endpoint_name:
:vartype endpoint_name: str
:ivar endpoint_description:
:vartype endpoint_description: str
:ivar auth_mode: Possible values include: "AMLToken", "Key", "AADToken".
:vartype auth_mode: str or ~flow.models.EndpointAuthMode
:ivar identity:
:vartype identity: ~flow.models.ManagedServiceIdentity
:ivar endpoint_tags: This is a dictionary.
:vartype endpoint_tags: dict[str, str]
:ivar enable_public_network_access:
:vartype enable_public_network_access: bool
:ivar connection_overrides:
:vartype connection_overrides: list[~flow.models.ConnectionOverrideSetting]
:ivar use_workspace_connection:
:vartype use_workspace_connection: bool
:ivar deployment_name:
:vartype deployment_name: str
:ivar environment:
:vartype environment: str
:ivar environment_variables: This is a dictionary.
:vartype environment_variables: dict[str, str]
:ivar deployment_tags: This is a dictionary.
:vartype deployment_tags: dict[str, str]
:ivar app_insights_enabled:
:vartype app_insights_enabled: bool
:ivar enable_model_data_collector:
:vartype enable_model_data_collector: bool
:ivar skip_update_traffic_to_full:
:vartype skip_update_traffic_to_full: bool
:ivar enable_streaming_response:
:vartype enable_streaming_response: bool
:ivar instance_type:
:vartype instance_type: str
:ivar instance_count:
:vartype instance_count: int
:ivar auto_grant_connection_permission:
:vartype auto_grant_connection_permission: bool
"""
_attribute_map = {
'source_resource_id': {'key': 'sourceResourceId', 'type': 'str'},
'source_flow_run_id': {'key': 'sourceFlowRunId', 'type': 'str'},
'source_flow_id': {'key': 'sourceFlowId', 'type': 'str'},
'flow': {'key': 'flow', 'type': 'Flow'},
'flow_type': {'key': 'flowType', 'type': 'str'},
'flow_submit_run_settings': {'key': 'flowSubmitRunSettings', 'type': 'FlowSubmitRunSettings'},
'output_names_included_in_endpoint_response': {'key': 'outputNamesIncludedInEndpointResponse', 'type': '[str]'},
'endpoint_name': {'key': 'endpointName', 'type': 'str'},
'endpoint_description': {'key': 'endpointDescription', 'type': 'str'},
'auth_mode': {'key': 'authMode', 'type': 'str'},
'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'},
'endpoint_tags': {'key': 'endpointTags', 'type': '{str}'},
'enable_public_network_access': {'key': 'enablePublicNetworkAccess', 'type': 'bool'},
'connection_overrides': {'key': 'connectionOverrides', 'type': '[ConnectionOverrideSetting]'},
'use_workspace_connection': {'key': 'useWorkspaceConnection', 'type': 'bool'},
'deployment_name': {'key': 'deploymentName', 'type': 'str'},
'environment': {'key': 'environment', 'type': 'str'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'deployment_tags': {'key': 'deploymentTags', 'type': '{str}'},
'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
'enable_model_data_collector': {'key': 'enableModelDataCollector', 'type': 'bool'},
'skip_update_traffic_to_full': {'key': 'skipUpdateTrafficToFull', 'type': 'bool'},
'enable_streaming_response': {'key': 'enableStreamingResponse', 'type': 'bool'},
'instance_type': {'key': 'instanceType', 'type': 'str'},
'instance_count': {'key': 'instanceCount', 'type': 'int'},
'auto_grant_connection_permission': {'key': 'autoGrantConnectionPermission', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword source_resource_id:
:paramtype source_resource_id: str
:keyword source_flow_run_id:
:paramtype source_flow_run_id: str
:keyword source_flow_id:
:paramtype source_flow_id: str
:keyword flow:
:paramtype flow: ~flow.models.Flow
:keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:paramtype flow_type: str or ~flow.models.FlowType
:keyword flow_submit_run_settings:
:paramtype flow_submit_run_settings: ~flow.models.FlowSubmitRunSettings
:keyword output_names_included_in_endpoint_response:
:paramtype output_names_included_in_endpoint_response: list[str]
:keyword endpoint_name:
:paramtype endpoint_name: str
:keyword endpoint_description:
:paramtype endpoint_description: str
:keyword auth_mode: Possible values include: "AMLToken", "Key", "AADToken".
:paramtype auth_mode: str or ~flow.models.EndpointAuthMode
:keyword identity:
:paramtype identity: ~flow.models.ManagedServiceIdentity
:keyword endpoint_tags: This is a dictionary.
:paramtype endpoint_tags: dict[str, str]
:keyword enable_public_network_access:
:paramtype enable_public_network_access: bool
:keyword connection_overrides:
:paramtype connection_overrides: list[~flow.models.ConnectionOverrideSetting]
:keyword use_workspace_connection:
:paramtype use_workspace_connection: bool
:keyword deployment_name:
:paramtype deployment_name: str
:keyword environment:
:paramtype environment: str
:keyword environment_variables: This is a dictionary.
:paramtype environment_variables: dict[str, str]
:keyword deployment_tags: This is a dictionary.
:paramtype deployment_tags: dict[str, str]
:keyword app_insights_enabled:
:paramtype app_insights_enabled: bool
:keyword enable_model_data_collector:
:paramtype enable_model_data_collector: bool
:keyword skip_update_traffic_to_full:
:paramtype skip_update_traffic_to_full: bool
:keyword enable_streaming_response:
:paramtype enable_streaming_response: bool
:keyword instance_type:
:paramtype instance_type: str
:keyword instance_count:
:paramtype instance_count: int
:keyword auto_grant_connection_permission:
:paramtype auto_grant_connection_permission: bool
"""
super(DeployFlowRequest, self).__init__(**kwargs)
self.source_resource_id = kwargs.get('source_resource_id', None)
self.source_flow_run_id = kwargs.get('source_flow_run_id', None)
self.source_flow_id = kwargs.get('source_flow_id', None)
self.flow = kwargs.get('flow', None)
self.flow_type = kwargs.get('flow_type', None)
self.flow_submit_run_settings = kwargs.get('flow_submit_run_settings', None)
self.output_names_included_in_endpoint_response = kwargs.get('output_names_included_in_endpoint_response', None)
self.endpoint_name = kwargs.get('endpoint_name', None)
self.endpoint_description = kwargs.get('endpoint_description', None)
self.auth_mode = kwargs.get('auth_mode', None)
self.identity = kwargs.get('identity', None)
self.endpoint_tags = kwargs.get('endpoint_tags', None)
self.enable_public_network_access = kwargs.get('enable_public_network_access', None)
self.connection_overrides = kwargs.get('connection_overrides', None)
self.use_workspace_connection = kwargs.get('use_workspace_connection', None)
self.deployment_name = kwargs.get('deployment_name', None)
self.environment = kwargs.get('environment', None)
self.environment_variables = kwargs.get('environment_variables', None)
self.deployment_tags = kwargs.get('deployment_tags', None)
self.app_insights_enabled = kwargs.get('app_insights_enabled', None)
self.enable_model_data_collector = kwargs.get('enable_model_data_collector', None)
self.skip_update_traffic_to_full = kwargs.get('skip_update_traffic_to_full', None)
self.enable_streaming_response = kwargs.get('enable_streaming_response', None)
self.instance_type = kwargs.get('instance_type', None)
self.instance_count = kwargs.get('instance_count', None)
self.auto_grant_connection_permission = kwargs.get('auto_grant_connection_permission', None)
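

# --- Illustrative usage (hand-written sketch, not AutoRest-generated) ---
# Only the endpoint/deployment-shaped subset of the request is shown; the
# endpoint name, deployment name, and instance SKU below are placeholders.
def _example_deploy_flow_request():
    """Hedged sketch: a key-authenticated single-instance deployment request."""
    return DeployFlowRequest(
        endpoint_name='my-flow-endpoint',
        auth_mode='Key',
        deployment_name='blue',
        instance_type='Standard_DS3_v2',
        instance_count=1,
        environment_variables={'LOG_LEVEL': 'info'},
    )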


class DeploymentInfo(msrest.serialization.Model):
"""DeploymentInfo.
:ivar operation_id:
:vartype operation_id: str
:ivar service_id:
:vartype service_id: str
:ivar service_name:
:vartype service_name: str
:ivar status_detail:
:vartype status_detail: str
"""
_attribute_map = {
'operation_id': {'key': 'operationId', 'type': 'str'},
'service_id': {'key': 'serviceId', 'type': 'str'},
'service_name': {'key': 'serviceName', 'type': 'str'},
'status_detail': {'key': 'statusDetail', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword operation_id:
:paramtype operation_id: str
:keyword service_id:
:paramtype service_id: str
:keyword service_name:
:paramtype service_name: str
:keyword status_detail:
:paramtype status_detail: str
"""
super(DeploymentInfo, self).__init__(**kwargs)
self.operation_id = kwargs.get('operation_id', None)
self.service_id = kwargs.get('service_id', None)
self.service_name = kwargs.get('service_name', None)
self.status_detail = kwargs.get('status_detail', None)


class DistributionConfiguration(msrest.serialization.Model):
"""DistributionConfiguration.
:ivar distribution_type: Possible values include: "PyTorch", "TensorFlow", "Mpi", "Ray".
:vartype distribution_type: str or ~flow.models.DistributionType
"""
_attribute_map = {
'distribution_type': {'key': 'distributionType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword distribution_type: Possible values include: "PyTorch", "TensorFlow", "Mpi", "Ray".
:paramtype distribution_type: str or ~flow.models.DistributionType
"""
super(DistributionConfiguration, self).__init__(**kwargs)
self.distribution_type = kwargs.get('distribution_type', None)


class DistributionParameter(msrest.serialization.Model):
"""DistributionParameter.
:ivar name:
:vartype name: str
:ivar label:
:vartype label: str
:ivar description:
:vartype description: str
:ivar input_type: Possible values include: "Text", "Number".
:vartype input_type: str or ~flow.models.DistributionParameterEnum
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'input_type': {'key': 'inputType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword label:
:paramtype label: str
:keyword description:
:paramtype description: str
:keyword input_type: Possible values include: "Text", "Number".
:paramtype input_type: str or ~flow.models.DistributionParameterEnum
"""
super(DistributionParameter, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.label = kwargs.get('label', None)
self.description = kwargs.get('description', None)
self.input_type = kwargs.get('input_type', None)


class DockerBuildContext(msrest.serialization.Model):
"""DockerBuildContext.
:ivar location_type: Possible values include: "Git", "StorageAccount".
:vartype location_type: str or ~flow.models.BuildContextLocationType
:ivar location:
:vartype location: str
:ivar dockerfile_path:
:vartype dockerfile_path: str
"""
_attribute_map = {
'location_type': {'key': 'locationType', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'dockerfile_path': {'key': 'dockerfilePath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword location_type: Possible values include: "Git", "StorageAccount".
:paramtype location_type: str or ~flow.models.BuildContextLocationType
:keyword location:
:paramtype location: str
:keyword dockerfile_path:
:paramtype dockerfile_path: str
"""
super(DockerBuildContext, self).__init__(**kwargs)
self.location_type = kwargs.get('location_type', None)
self.location = kwargs.get('location', None)
self.dockerfile_path = kwargs.get('dockerfile_path', "Dockerfile")
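

# --- Illustrative usage (hand-written sketch, not AutoRest-generated) ---
# dockerfile_path is one of the few fields here with a non-None constructor
# default ('Dockerfile'), so it can simply be omitted. The repository URL
# below is a placeholder.
def _example_docker_build_context():
    """Hedged sketch: a Git-hosted build context using the default Dockerfile path."""
    return DockerBuildContext(
        location_type='Git',
        location='https://example.com/repo.git',
    )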


class DockerConfiguration(msrest.serialization.Model):
"""DockerConfiguration.
:ivar use_docker:
:vartype use_docker: bool
:ivar shared_volumes:
:vartype shared_volumes: bool
:ivar arguments:
:vartype arguments: list[str]
"""
_attribute_map = {
'use_docker': {'key': 'useDocker', 'type': 'bool'},
'shared_volumes': {'key': 'sharedVolumes', 'type': 'bool'},
'arguments': {'key': 'arguments', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword use_docker:
:paramtype use_docker: bool
:keyword shared_volumes:
:paramtype shared_volumes: bool
:keyword arguments:
:paramtype arguments: list[str]
"""
super(DockerConfiguration, self).__init__(**kwargs)
self.use_docker = kwargs.get('use_docker', None)
self.shared_volumes = kwargs.get('shared_volumes', None)
self.arguments = kwargs.get('arguments', None)


class DockerImagePlatform(msrest.serialization.Model):
"""DockerImagePlatform.
:ivar os:
:vartype os: str
:ivar architecture:
:vartype architecture: str
"""
_attribute_map = {
'os': {'key': 'os', 'type': 'str'},
'architecture': {'key': 'architecture', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword os:
:paramtype os: str
:keyword architecture:
:paramtype architecture: str
"""
super(DockerImagePlatform, self).__init__(**kwargs)
self.os = kwargs.get('os', None)
self.architecture = kwargs.get('architecture', None)


class DockerSection(msrest.serialization.Model):
"""DockerSection.
:ivar base_image:
:vartype base_image: str
:ivar platform:
:vartype platform: ~flow.models.DockerImagePlatform
:ivar base_dockerfile:
:vartype base_dockerfile: str
:ivar build_context:
:vartype build_context: ~flow.models.DockerBuildContext
:ivar base_image_registry:
:vartype base_image_registry: ~flow.models.ContainerRegistry
"""
_attribute_map = {
'base_image': {'key': 'baseImage', 'type': 'str'},
'platform': {'key': 'platform', 'type': 'DockerImagePlatform'},
'base_dockerfile': {'key': 'baseDockerfile', 'type': 'str'},
'build_context': {'key': 'buildContext', 'type': 'DockerBuildContext'},
'base_image_registry': {'key': 'baseImageRegistry', 'type': 'ContainerRegistry'},
}
def __init__(
self,
**kwargs
):
"""
:keyword base_image:
:paramtype base_image: str
:keyword platform:
:paramtype platform: ~flow.models.DockerImagePlatform
:keyword base_dockerfile:
:paramtype base_dockerfile: str
:keyword build_context:
:paramtype build_context: ~flow.models.DockerBuildContext
:keyword base_image_registry:
:paramtype base_image_registry: ~flow.models.ContainerRegistry
"""
super(DockerSection, self).__init__(**kwargs)
self.base_image = kwargs.get('base_image', None)
self.platform = kwargs.get('platform', None)
self.base_dockerfile = kwargs.get('base_dockerfile', None)
self.build_context = kwargs.get('build_context', None)
self.base_image_registry = kwargs.get('base_image_registry', None)
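

# --- Illustrative usage (hand-written sketch, not AutoRest-generated) ---
# Either a base image or a build context typically drives the environment's
# docker build; both the image name and the platform values below are
# placeholders.
def _example_docker_section():
    """Hedged sketch: a DockerSection pinned to a base image and platform."""
    return DockerSection(
        base_image='mcr.microsoft.com/azureml/openmpi4.1.0-ubuntu20.04',
        platform=DockerImagePlatform(os='Linux', architecture='amd64'),
    )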


class DockerSettingConfiguration(msrest.serialization.Model):
"""DockerSettingConfiguration.
:ivar use_docker:
:vartype use_docker: bool
:ivar shared_volumes:
:vartype shared_volumes: bool
:ivar shm_size:
:vartype shm_size: str
:ivar arguments:
:vartype arguments: list[str]
"""
_attribute_map = {
'use_docker': {'key': 'useDocker', 'type': 'bool'},
'shared_volumes': {'key': 'sharedVolumes', 'type': 'bool'},
'shm_size': {'key': 'shmSize', 'type': 'str'},
'arguments': {'key': 'arguments', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword use_docker:
:paramtype use_docker: bool
:keyword shared_volumes:
:paramtype shared_volumes: bool
:keyword shm_size:
:paramtype shm_size: str
:keyword arguments:
:paramtype arguments: list[str]
"""
super(DockerSettingConfiguration, self).__init__(**kwargs)
self.use_docker = kwargs.get('use_docker', None)
self.shared_volumes = kwargs.get('shared_volumes', None)
self.shm_size = kwargs.get('shm_size', None)
self.arguments = kwargs.get('arguments', None)


class DoWhileControlFlowInfo(msrest.serialization.Model):
"""DoWhileControlFlowInfo.
    :ivar output_port_name_to_input_port_names_mapping: Dictionary mapping each output port
     name to the list of input port names it feeds back into on the next loop iteration.
:vartype output_port_name_to_input_port_names_mapping: dict[str, list[str]]
:ivar condition_output_port_name:
:vartype condition_output_port_name: str
:ivar run_settings:
:vartype run_settings: ~flow.models.DoWhileControlFlowRunSettings
"""
_attribute_map = {
'output_port_name_to_input_port_names_mapping': {'key': 'outputPortNameToInputPortNamesMapping', 'type': '{[str]}'},
'condition_output_port_name': {'key': 'conditionOutputPortName', 'type': 'str'},
'run_settings': {'key': 'runSettings', 'type': 'DoWhileControlFlowRunSettings'},
}
def __init__(
self,
**kwargs
):
"""
        :keyword output_port_name_to_input_port_names_mapping: Dictionary mapping each output
         port name to the list of input port names it feeds back into on the next loop iteration.
:paramtype output_port_name_to_input_port_names_mapping: dict[str, list[str]]
:keyword condition_output_port_name:
:paramtype condition_output_port_name: str
:keyword run_settings:
:paramtype run_settings: ~flow.models.DoWhileControlFlowRunSettings
"""
super(DoWhileControlFlowInfo, self).__init__(**kwargs)
self.output_port_name_to_input_port_names_mapping = kwargs.get('output_port_name_to_input_port_names_mapping', None)
self.condition_output_port_name = kwargs.get('condition_output_port_name', None)
self.run_settings = kwargs.get('run_settings', None)
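

# --- Illustrative usage (hand-written sketch, not AutoRest-generated) ---
# The mapping expresses loop feedback: each output port listed feeds the
# named input ports on the next iteration. The port names below are
# placeholders, and max_loop_iteration_count is omitted because
# ParameterAssignment's keywords are defined elsewhere in this module.
def _example_do_while_info():
    """Hedged sketch: wire an output port back to an input port of a do-while loop."""
    return DoWhileControlFlowInfo(
        output_port_name_to_input_port_names_mapping={'output': ['input']},
        condition_output_port_name='condition',
    )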


class DoWhileControlFlowRunSettings(msrest.serialization.Model):
"""DoWhileControlFlowRunSettings.
:ivar max_loop_iteration_count:
:vartype max_loop_iteration_count: ~flow.models.ParameterAssignment
"""
_attribute_map = {
'max_loop_iteration_count': {'key': 'maxLoopIterationCount', 'type': 'ParameterAssignment'},
}
def __init__(
self,
**kwargs
):
"""
:keyword max_loop_iteration_count:
:paramtype max_loop_iteration_count: ~flow.models.ParameterAssignment
"""
super(DoWhileControlFlowRunSettings, self).__init__(**kwargs)
self.max_loop_iteration_count = kwargs.get('max_loop_iteration_count', None)


class DownloadResourceInfo(msrest.serialization.Model):
"""DownloadResourceInfo.
:ivar download_url:
:vartype download_url: str
:ivar size:
:vartype size: long
"""
_attribute_map = {
'download_url': {'key': 'downloadUrl', 'type': 'str'},
'size': {'key': 'size', 'type': 'long'},
}
def __init__(
self,
**kwargs
):
"""
:keyword download_url:
:paramtype download_url: str
:keyword size:
:paramtype size: long
"""
super(DownloadResourceInfo, self).__init__(**kwargs)
self.download_url = kwargs.get('download_url', None)
self.size = kwargs.get('size', None)


class EndpointSetting(msrest.serialization.Model):
"""EndpointSetting.
:ivar type:
:vartype type: str
:ivar port:
:vartype port: int
:ivar ssl_thumbprint:
:vartype ssl_thumbprint: str
:ivar endpoint:
:vartype endpoint: str
:ivar proxy_endpoint:
:vartype proxy_endpoint: str
:ivar status:
:vartype status: str
:ivar error_message:
:vartype error_message: str
:ivar enabled:
:vartype enabled: bool
:ivar properties: Dictionary of :code:`<string>`.
:vartype properties: dict[str, str]
:ivar nodes:
:vartype nodes: str
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'port': {'key': 'port', 'type': 'int'},
'ssl_thumbprint': {'key': 'sslThumbprint', 'type': 'str'},
'endpoint': {'key': 'endpoint', 'type': 'str'},
'proxy_endpoint': {'key': 'proxyEndpoint', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'error_message': {'key': 'errorMessage', 'type': 'str'},
'enabled': {'key': 'enabled', 'type': 'bool'},
'properties': {'key': 'properties', 'type': '{str}'},
'nodes': {'key': 'nodes', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type:
:paramtype type: str
:keyword port:
:paramtype port: int
:keyword ssl_thumbprint:
:paramtype ssl_thumbprint: str
:keyword endpoint:
:paramtype endpoint: str
:keyword proxy_endpoint:
:paramtype proxy_endpoint: str
:keyword status:
:paramtype status: str
:keyword error_message:
:paramtype error_message: str
:keyword enabled:
:paramtype enabled: bool
:keyword properties: Dictionary of :code:`<string>`.
:paramtype properties: dict[str, str]
:keyword nodes:
:paramtype nodes: str
"""
super(EndpointSetting, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.port = kwargs.get('port', None)
self.ssl_thumbprint = kwargs.get('ssl_thumbprint', None)
self.endpoint = kwargs.get('endpoint', None)
self.proxy_endpoint = kwargs.get('proxy_endpoint', None)
self.status = kwargs.get('status', None)
self.error_message = kwargs.get('error_message', None)
self.enabled = kwargs.get('enabled', None)
self.properties = kwargs.get('properties', None)
self.nodes = kwargs.get('nodes', None)


class EntityInterface(msrest.serialization.Model):
"""EntityInterface.
:ivar parameters:
:vartype parameters: list[~flow.models.Parameter]
:ivar ports:
:vartype ports: ~flow.models.NodePortInterface
:ivar metadata_parameters:
:vartype metadata_parameters: list[~flow.models.Parameter]
:ivar data_path_parameters:
:vartype data_path_parameters: list[~flow.models.DataPathParameter]
:ivar data_path_parameter_list:
:vartype data_path_parameter_list: list[~flow.models.DataSetPathParameter]
:ivar asset_output_settings_parameter_list:
:vartype asset_output_settings_parameter_list: list[~flow.models.AssetOutputSettingsParameter]
"""
_attribute_map = {
'parameters': {'key': 'parameters', 'type': '[Parameter]'},
'ports': {'key': 'ports', 'type': 'NodePortInterface'},
'metadata_parameters': {'key': 'metadataParameters', 'type': '[Parameter]'},
'data_path_parameters': {'key': 'dataPathParameters', 'type': '[DataPathParameter]'},
'data_path_parameter_list': {'key': 'dataPathParameterList', 'type': '[DataSetPathParameter]'},
'asset_output_settings_parameter_list': {'key': 'AssetOutputSettingsParameterList', 'type': '[AssetOutputSettingsParameter]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword parameters:
:paramtype parameters: list[~flow.models.Parameter]
:keyword ports:
:paramtype ports: ~flow.models.NodePortInterface
:keyword metadata_parameters:
:paramtype metadata_parameters: list[~flow.models.Parameter]
:keyword data_path_parameters:
:paramtype data_path_parameters: list[~flow.models.DataPathParameter]
:keyword data_path_parameter_list:
:paramtype data_path_parameter_list: list[~flow.models.DataSetPathParameter]
:keyword asset_output_settings_parameter_list:
:paramtype asset_output_settings_parameter_list:
list[~flow.models.AssetOutputSettingsParameter]
"""
super(EntityInterface, self).__init__(**kwargs)
self.parameters = kwargs.get('parameters', None)
self.ports = kwargs.get('ports', None)
self.metadata_parameters = kwargs.get('metadata_parameters', None)
self.data_path_parameters = kwargs.get('data_path_parameters', None)
self.data_path_parameter_list = kwargs.get('data_path_parameter_list', None)
self.asset_output_settings_parameter_list = kwargs.get('asset_output_settings_parameter_list', None)


class EntrySetting(msrest.serialization.Model):
"""EntrySetting.
:ivar file:
:vartype file: str
:ivar class_name:
:vartype class_name: str
"""
_attribute_map = {
'file': {'key': 'file', 'type': 'str'},
'class_name': {'key': 'className', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword file:
:paramtype file: str
:keyword class_name:
:paramtype class_name: str
"""
super(EntrySetting, self).__init__(**kwargs)
self.file = kwargs.get('file', None)
self.class_name = kwargs.get('class_name', None)
class EnumParameterRule(msrest.serialization.Model):
"""EnumParameterRule.
:ivar valid_values:
:vartype valid_values: list[str]
"""
_attribute_map = {
'valid_values': {'key': 'validValues', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword valid_values:
:paramtype valid_values: list[str]
"""
super(EnumParameterRule, self).__init__(**kwargs)
self.valid_values = kwargs.get('valid_values', None)
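
# Example (hand-written, hypothetical values; lost on regeneration): a rule
# constraining a parameter to a fixed value set. `serialize()`, inherited from
# msrest.serialization.Model, emits the wire key `validValues`:
#
#     rule = EnumParameterRule(valid_values=["small", "medium", "large"])
#     assert rule.serialize() == {"validValues": ["small", "medium", "large"]}
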
class EnvironmentConfiguration(msrest.serialization.Model):
"""EnvironmentConfiguration.
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
:ivar use_environment_definition:
:vartype use_environment_definition: bool
:ivar environment_definition_string:
:vartype environment_definition_string: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'use_environment_definition': {'key': 'useEnvironmentDefinition', 'type': 'bool'},
'environment_definition_string': {'key': 'environmentDefinitionString', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
:keyword use_environment_definition:
:paramtype use_environment_definition: bool
:keyword environment_definition_string:
:paramtype environment_definition_string: str
"""
super(EnvironmentConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.version = kwargs.get('version', None)
self.use_environment_definition = kwargs.get('use_environment_definition', None)
self.environment_definition_string = kwargs.get('environment_definition_string', None)
class EnvironmentDefinition(msrest.serialization.Model):
"""EnvironmentDefinition.
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
:ivar asset_id:
:vartype asset_id: str
:ivar auto_rebuild:
:vartype auto_rebuild: bool
:ivar python:
:vartype python: ~flow.models.PythonSection
:ivar environment_variables: Dictionary of :code:`<string>`.
:vartype environment_variables: dict[str, str]
:ivar docker:
:vartype docker: ~flow.models.DockerSection
:ivar spark:
:vartype spark: ~flow.models.SparkSection
:ivar r:
:vartype r: ~flow.models.RSection
:ivar inferencing_stack_version:
:vartype inferencing_stack_version: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'asset_id': {'key': 'assetId', 'type': 'str'},
'auto_rebuild': {'key': 'autoRebuild', 'type': 'bool'},
'python': {'key': 'python', 'type': 'PythonSection'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'docker': {'key': 'docker', 'type': 'DockerSection'},
'spark': {'key': 'spark', 'type': 'SparkSection'},
'r': {'key': 'r', 'type': 'RSection'},
'inferencing_stack_version': {'key': 'inferencingStackVersion', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
:keyword asset_id:
:paramtype asset_id: str
:keyword auto_rebuild:
:paramtype auto_rebuild: bool
:keyword python:
:paramtype python: ~flow.models.PythonSection
:keyword environment_variables: Dictionary of :code:`<string>`.
:paramtype environment_variables: dict[str, str]
:keyword docker:
:paramtype docker: ~flow.models.DockerSection
:keyword spark:
:paramtype spark: ~flow.models.SparkSection
:keyword r:
:paramtype r: ~flow.models.RSection
:keyword inferencing_stack_version:
:paramtype inferencing_stack_version: str
"""
super(EnvironmentDefinition, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.version = kwargs.get('version', None)
self.asset_id = kwargs.get('asset_id', None)
self.auto_rebuild = kwargs.get('auto_rebuild', None)
self.python = kwargs.get('python', None)
self.environment_variables = kwargs.get('environment_variables', None)
self.docker = kwargs.get('docker', None)
self.spark = kwargs.get('spark', None)
self.r = kwargs.get('r', None)
self.inferencing_stack_version = kwargs.get('inferencing_stack_version', None)
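
# Example (hand-written sketch with hypothetical values; lost on regeneration).
# Nested sections such as `python`, `docker`, `spark`, and `r` take instances
# of the corresponding models (PythonSection, DockerSection, ...) defined
# elsewhere in this module:
#
#     env = EnvironmentDefinition(
#         name="my-flow-env",
#         version="1",
#         auto_rebuild=True,
#         environment_variables={"LOG_LEVEL": "INFO"},
#     )
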
class EnvironmentDefinitionDto(msrest.serialization.Model):
"""EnvironmentDefinitionDto.
:ivar environment_name:
:vartype environment_name: str
:ivar environment_version:
:vartype environment_version: str
:ivar intellectual_property_publisher:
:vartype intellectual_property_publisher: str
"""
_attribute_map = {
'environment_name': {'key': 'environmentName', 'type': 'str'},
'environment_version': {'key': 'environmentVersion', 'type': 'str'},
'intellectual_property_publisher': {'key': 'intellectualPropertyPublisher', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword environment_name:
:paramtype environment_name: str
:keyword environment_version:
:paramtype environment_version: str
:keyword intellectual_property_publisher:
:paramtype intellectual_property_publisher: str
"""
super(EnvironmentDefinitionDto, self).__init__(**kwargs)
self.environment_name = kwargs.get('environment_name', None)
self.environment_version = kwargs.get('environment_version', None)
self.intellectual_property_publisher = kwargs.get('intellectual_property_publisher', None)
class EPRPipelineRunErrorClassificationRequest(msrest.serialization.Model):
"""EPRPipelineRunErrorClassificationRequest.
:ivar root_run_id:
:vartype root_run_id: str
:ivar run_id:
:vartype run_id: str
:ivar task_result:
:vartype task_result: str
:ivar failure_type:
:vartype failure_type: str
:ivar failure_name:
:vartype failure_name: str
:ivar responsible_team:
:vartype responsible_team: str
"""
_attribute_map = {
'root_run_id': {'key': 'rootRunId', 'type': 'str'},
'run_id': {'key': 'runId', 'type': 'str'},
'task_result': {'key': 'taskResult', 'type': 'str'},
'failure_type': {'key': 'failureType', 'type': 'str'},
'failure_name': {'key': 'failureName', 'type': 'str'},
'responsible_team': {'key': 'responsibleTeam', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword root_run_id:
:paramtype root_run_id: str
:keyword run_id:
:paramtype run_id: str
:keyword task_result:
:paramtype task_result: str
:keyword failure_type:
:paramtype failure_type: str
:keyword failure_name:
:paramtype failure_name: str
:keyword responsible_team:
:paramtype responsible_team: str
"""
super(EPRPipelineRunErrorClassificationRequest, self).__init__(**kwargs)
self.root_run_id = kwargs.get('root_run_id', None)
self.run_id = kwargs.get('run_id', None)
self.task_result = kwargs.get('task_result', None)
self.failure_type = kwargs.get('failure_type', None)
self.failure_name = kwargs.get('failure_name', None)
self.responsible_team = kwargs.get('responsible_team', None)
class ErrorAdditionalInfo(msrest.serialization.Model):
"""The resource management error additional info.
:ivar type: The additional info type.
:vartype type: str
:ivar info: The additional info.
:vartype info: any
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'info': {'key': 'info', 'type': 'object'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: The additional info type.
:paramtype type: str
:keyword info: The additional info.
:paramtype info: any
"""
super(ErrorAdditionalInfo, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.info = kwargs.get('info', None)
class ErrorResponse(msrest.serialization.Model):
"""The error response.
:ivar error: The root error.
:vartype error: ~flow.models.RootError
:ivar correlation: Dictionary containing correlation details for the error.
:vartype correlation: dict[str, str]
:ivar environment: The hosting environment.
:vartype environment: str
:ivar location: The Azure region.
:vartype location: str
:ivar time: The time in UTC.
:vartype time: ~datetime.datetime
    :ivar component_name: Component name where the error originated or was encountered.
:vartype component_name: str
"""
_attribute_map = {
'error': {'key': 'error', 'type': 'RootError'},
'correlation': {'key': 'correlation', 'type': '{str}'},
'environment': {'key': 'environment', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'time': {'key': 'time', 'type': 'iso-8601'},
'component_name': {'key': 'componentName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword error: The root error.
:paramtype error: ~flow.models.RootError
:keyword correlation: Dictionary containing correlation details for the error.
:paramtype correlation: dict[str, str]
:keyword environment: The hosting environment.
:paramtype environment: str
:keyword location: The Azure region.
:paramtype location: str
:keyword time: The time in UTC.
:paramtype time: ~datetime.datetime
    :keyword component_name: Component name where the error originated or was encountered.
:paramtype component_name: str
"""
super(ErrorResponse, self).__init__(**kwargs)
self.error = kwargs.get('error', None)
self.correlation = kwargs.get('correlation', None)
self.environment = kwargs.get('environment', None)
self.location = kwargs.get('location', None)
self.time = kwargs.get('time', None)
self.component_name = kwargs.get('component_name', None)
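
# Example (hand-written sketch over a hypothetical payload; lost on
# regeneration). `from_dict`, inherited from msrest.serialization.Model, can
# rehydrate a model from a service response body keyed by wire names:
#
#     payload = {
#         "correlation": {"RequestId": "00000000-0000-0000-0000-000000000000"},
#         "environment": "eastus",
#         "componentName": "flow",
#     }
#     error_response = ErrorResponse.from_dict(payload)
#     assert error_response.component_name == "flow"
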
class EsCloudConfiguration(msrest.serialization.Model):
"""EsCloudConfiguration.
:ivar enable_output_to_file_based_on_data_type_id:
:vartype enable_output_to_file_based_on_data_type_id: bool
:ivar environment:
:vartype environment: ~flow.models.EnvironmentConfiguration
:ivar hyper_drive_configuration:
:vartype hyper_drive_configuration: ~flow.models.HyperDriveConfiguration
:ivar k8_s_config:
:vartype k8_s_config: ~flow.models.K8SConfiguration
:ivar resource_config:
:vartype resource_config: ~flow.models.AEVAResourceConfiguration
:ivar torch_distributed_config:
:vartype torch_distributed_config: ~flow.models.TorchDistributedConfiguration
:ivar target_selector_config:
:vartype target_selector_config: ~flow.models.TargetSelectorConfiguration
:ivar docker_config:
:vartype docker_config: ~flow.models.DockerSettingConfiguration
:ivar environment_variables: Dictionary of :code:`<string>`.
:vartype environment_variables: dict[str, str]
:ivar max_run_duration_seconds:
:vartype max_run_duration_seconds: int
:ivar identity:
:vartype identity: ~flow.models.IdentitySetting
:ivar application_endpoints: Dictionary of :code:`<ApplicationEndpointConfiguration>`.
:vartype application_endpoints: dict[str, ~flow.models.ApplicationEndpointConfiguration]
:ivar run_config:
:vartype run_config: str
"""
_attribute_map = {
'enable_output_to_file_based_on_data_type_id': {'key': 'enableOutputToFileBasedOnDataTypeId', 'type': 'bool'},
'environment': {'key': 'environment', 'type': 'EnvironmentConfiguration'},
'hyper_drive_configuration': {'key': 'hyperDriveConfiguration', 'type': 'HyperDriveConfiguration'},
'k8_s_config': {'key': 'k8sConfig', 'type': 'K8SConfiguration'},
'resource_config': {'key': 'resourceConfig', 'type': 'AEVAResourceConfiguration'},
'torch_distributed_config': {'key': 'torchDistributedConfig', 'type': 'TorchDistributedConfiguration'},
'target_selector_config': {'key': 'targetSelectorConfig', 'type': 'TargetSelectorConfiguration'},
'docker_config': {'key': 'dockerConfig', 'type': 'DockerSettingConfiguration'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'max_run_duration_seconds': {'key': 'maxRunDurationSeconds', 'type': 'int'},
'identity': {'key': 'identity', 'type': 'IdentitySetting'},
'application_endpoints': {'key': 'applicationEndpoints', 'type': '{ApplicationEndpointConfiguration}'},
'run_config': {'key': 'runConfig', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword enable_output_to_file_based_on_data_type_id:
:paramtype enable_output_to_file_based_on_data_type_id: bool
:keyword environment:
:paramtype environment: ~flow.models.EnvironmentConfiguration
:keyword hyper_drive_configuration:
:paramtype hyper_drive_configuration: ~flow.models.HyperDriveConfiguration
:keyword k8_s_config:
:paramtype k8_s_config: ~flow.models.K8SConfiguration
:keyword resource_config:
:paramtype resource_config: ~flow.models.AEVAResourceConfiguration
:keyword torch_distributed_config:
:paramtype torch_distributed_config: ~flow.models.TorchDistributedConfiguration
:keyword target_selector_config:
:paramtype target_selector_config: ~flow.models.TargetSelectorConfiguration
:keyword docker_config:
:paramtype docker_config: ~flow.models.DockerSettingConfiguration
:keyword environment_variables: Dictionary of :code:`<string>`.
:paramtype environment_variables: dict[str, str]
:keyword max_run_duration_seconds:
:paramtype max_run_duration_seconds: int
:keyword identity:
:paramtype identity: ~flow.models.IdentitySetting
:keyword application_endpoints: Dictionary of :code:`<ApplicationEndpointConfiguration>`.
:paramtype application_endpoints: dict[str, ~flow.models.ApplicationEndpointConfiguration]
:keyword run_config:
:paramtype run_config: str
"""
super(EsCloudConfiguration, self).__init__(**kwargs)
self.enable_output_to_file_based_on_data_type_id = kwargs.get('enable_output_to_file_based_on_data_type_id', None)
self.environment = kwargs.get('environment', None)
self.hyper_drive_configuration = kwargs.get('hyper_drive_configuration', None)
self.k8_s_config = kwargs.get('k8_s_config', None)
self.resource_config = kwargs.get('resource_config', None)
self.torch_distributed_config = kwargs.get('torch_distributed_config', None)
self.target_selector_config = kwargs.get('target_selector_config', None)
self.docker_config = kwargs.get('docker_config', None)
self.environment_variables = kwargs.get('environment_variables', None)
self.max_run_duration_seconds = kwargs.get('max_run_duration_seconds', None)
self.identity = kwargs.get('identity', None)
self.application_endpoints = kwargs.get('application_endpoints', None)
self.run_config = kwargs.get('run_config', None)
class EvaluationFlowRunSettings(msrest.serialization.Model):
"""EvaluationFlowRunSettings.
:ivar flow_run_id:
:vartype flow_run_id: str
:ivar variant_run_variants:
:vartype variant_run_variants: list[~flow.models.VariantIdentifier]
:ivar batch_inputs:
:vartype batch_inputs: list[dict[str, any]]
:ivar input_universal_link:
:vartype input_universal_link: str
:ivar data_inputs: This is a dictionary.
:vartype data_inputs: dict[str, str]
:ivar flow_run_output_directory:
:vartype flow_run_output_directory: str
:ivar connection_overrides:
:vartype connection_overrides: list[~flow.models.ConnectionOverrideSetting]
:ivar flow_run_display_name:
:vartype flow_run_display_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar runtime_name:
:vartype runtime_name: str
:ivar batch_data_input:
:vartype batch_data_input: ~flow.models.BatchDataInput
:ivar inputs_mapping: This is a dictionary.
:vartype inputs_mapping: dict[str, str]
:ivar connections: This is a dictionary.
:vartype connections: dict[str, dict[str, str]]
:ivar environment_variables: This is a dictionary.
:vartype environment_variables: dict[str, str]
:ivar output_data_store:
:vartype output_data_store: str
:ivar run_display_name_generation_type: Possible values include: "AutoAppend",
"UserProvidedMacro".
:vartype run_display_name_generation_type: str or ~flow.models.RunDisplayNameGenerationType
:ivar aml_compute_name:
:vartype aml_compute_name: str
:ivar worker_count:
:vartype worker_count: int
:ivar timeout_in_seconds:
:vartype timeout_in_seconds: int
:ivar promptflow_engine_type: Possible values include: "FastEngine", "ScalableEngine".
:vartype promptflow_engine_type: str or ~flow.models.PromptflowEngineType
"""
_attribute_map = {
'flow_run_id': {'key': 'flowRunId', 'type': 'str'},
'variant_run_variants': {'key': 'variantRunVariants', 'type': '[VariantIdentifier]'},
'batch_inputs': {'key': 'batch_inputs', 'type': '[{object}]'},
'input_universal_link': {'key': 'inputUniversalLink', 'type': 'str'},
'data_inputs': {'key': 'dataInputs', 'type': '{str}'},
'flow_run_output_directory': {'key': 'flowRunOutputDirectory', 'type': 'str'},
'connection_overrides': {'key': 'connectionOverrides', 'type': '[ConnectionOverrideSetting]'},
'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'},
'connections': {'key': 'connections', 'type': '{{str}}'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'output_data_store': {'key': 'outputDataStore', 'type': 'str'},
'run_display_name_generation_type': {'key': 'runDisplayNameGenerationType', 'type': 'str'},
'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'},
'worker_count': {'key': 'workerCount', 'type': 'int'},
'timeout_in_seconds': {'key': 'timeoutInSeconds', 'type': 'int'},
'promptflow_engine_type': {'key': 'promptflowEngineType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_run_id:
:paramtype flow_run_id: str
:keyword variant_run_variants:
:paramtype variant_run_variants: list[~flow.models.VariantIdentifier]
:keyword batch_inputs:
:paramtype batch_inputs: list[dict[str, any]]
:keyword input_universal_link:
:paramtype input_universal_link: str
:keyword data_inputs: This is a dictionary.
:paramtype data_inputs: dict[str, str]
:keyword flow_run_output_directory:
:paramtype flow_run_output_directory: str
:keyword connection_overrides:
:paramtype connection_overrides: list[~flow.models.ConnectionOverrideSetting]
:keyword flow_run_display_name:
:paramtype flow_run_display_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword runtime_name:
:paramtype runtime_name: str
:keyword batch_data_input:
:paramtype batch_data_input: ~flow.models.BatchDataInput
:keyword inputs_mapping: This is a dictionary.
:paramtype inputs_mapping: dict[str, str]
:keyword connections: This is a dictionary.
:paramtype connections: dict[str, dict[str, str]]
:keyword environment_variables: This is a dictionary.
:paramtype environment_variables: dict[str, str]
:keyword output_data_store:
:paramtype output_data_store: str
:keyword run_display_name_generation_type: Possible values include: "AutoAppend",
"UserProvidedMacro".
:paramtype run_display_name_generation_type: str or ~flow.models.RunDisplayNameGenerationType
:keyword aml_compute_name:
:paramtype aml_compute_name: str
:keyword worker_count:
:paramtype worker_count: int
:keyword timeout_in_seconds:
:paramtype timeout_in_seconds: int
:keyword promptflow_engine_type: Possible values include: "FastEngine", "ScalableEngine".
:paramtype promptflow_engine_type: str or ~flow.models.PromptflowEngineType
"""
super(EvaluationFlowRunSettings, self).__init__(**kwargs)
self.flow_run_id = kwargs.get('flow_run_id', None)
self.variant_run_variants = kwargs.get('variant_run_variants', None)
self.batch_inputs = kwargs.get('batch_inputs', None)
self.input_universal_link = kwargs.get('input_universal_link', None)
self.data_inputs = kwargs.get('data_inputs', None)
self.flow_run_output_directory = kwargs.get('flow_run_output_directory', None)
self.connection_overrides = kwargs.get('connection_overrides', None)
self.flow_run_display_name = kwargs.get('flow_run_display_name', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
self.runtime_name = kwargs.get('runtime_name', None)
self.batch_data_input = kwargs.get('batch_data_input', None)
self.inputs_mapping = kwargs.get('inputs_mapping', None)
self.connections = kwargs.get('connections', None)
self.environment_variables = kwargs.get('environment_variables', None)
self.output_data_store = kwargs.get('output_data_store', None)
self.run_display_name_generation_type = kwargs.get('run_display_name_generation_type', None)
self.aml_compute_name = kwargs.get('aml_compute_name', None)
self.worker_count = kwargs.get('worker_count', None)
self.timeout_in_seconds = kwargs.get('timeout_in_seconds', None)
self.promptflow_engine_type = kwargs.get('promptflow_engine_type', None)
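
# Example (hand-written sketch; every value is hypothetical and the block is
# lost on regeneration). `batch_inputs` is a list of per-row input dicts, while
# `inputs_mapping` binds flow inputs to columns of the batch data:
#
#     run_settings = EvaluationFlowRunSettings(
#         flow_run_id="run-001",
#         batch_inputs=[{"question": "What is AutoRest?"}],
#         inputs_mapping={"question": "${data.question}"},
#         environment_variables={"LOG_LEVEL": "INFO"},
#         worker_count=4,
#         timeout_in_seconds=3600,
#     )
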
class ExampleRequest(msrest.serialization.Model):
"""ExampleRequest.
:ivar inputs: This is a dictionary.
:vartype inputs: dict[str, list[list[any]]]
:ivar global_parameters: This is a dictionary.
:vartype global_parameters: dict[str, any]
"""
_attribute_map = {
'inputs': {'key': 'inputs', 'type': '{[[object]]}'},
'global_parameters': {'key': 'globalParameters', 'type': '{object}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword inputs: This is a dictionary.
:paramtype inputs: dict[str, list[list[any]]]
:keyword global_parameters: This is a dictionary.
:paramtype global_parameters: dict[str, any]
"""
super(ExampleRequest, self).__init__(**kwargs)
self.inputs = kwargs.get('inputs', None)
self.global_parameters = kwargs.get('global_parameters', None)
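
# Example (hand-written, hypothetical shapes; lost on regeneration). `inputs`
# maps each input name to a matrix of values (a list of rows, each row a list
# of cell values), while `global_parameters` applies to the whole request:
#
#     request = ExampleRequest(
#         inputs={"input1": [[1.0, 2.0], [3.0, 4.0]]},
#         global_parameters={"scale": 0.5},
#     )
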
class ExecutionContextDto(msrest.serialization.Model):
"""ExecutionContextDto.
:ivar executable:
:vartype executable: str
:ivar user_code:
:vartype user_code: str
:ivar arguments:
:vartype arguments: str
"""
_attribute_map = {
'executable': {'key': 'executable', 'type': 'str'},
'user_code': {'key': 'userCode', 'type': 'str'},
'arguments': {'key': 'arguments', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword executable:
:paramtype executable: str
:keyword user_code:
:paramtype user_code: str
:keyword arguments:
:paramtype arguments: str
"""
super(ExecutionContextDto, self).__init__(**kwargs)
self.executable = kwargs.get('executable', None)
self.user_code = kwargs.get('user_code', None)
self.arguments = kwargs.get('arguments', None)
class ExecutionDataLocation(msrest.serialization.Model):
"""ExecutionDataLocation.
:ivar dataset:
:vartype dataset: ~flow.models.RunDatasetReference
:ivar data_path:
:vartype data_path: ~flow.models.ExecutionDataPath
:ivar uri:
:vartype uri: ~flow.models.UriReference
:ivar type:
:vartype type: str
"""
_attribute_map = {
'dataset': {'key': 'dataset', 'type': 'RunDatasetReference'},
'data_path': {'key': 'dataPath', 'type': 'ExecutionDataPath'},
'uri': {'key': 'uri', 'type': 'UriReference'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword dataset:
:paramtype dataset: ~flow.models.RunDatasetReference
:keyword data_path:
:paramtype data_path: ~flow.models.ExecutionDataPath
:keyword uri:
:paramtype uri: ~flow.models.UriReference
:keyword type:
:paramtype type: str
"""
super(ExecutionDataLocation, self).__init__(**kwargs)
self.dataset = kwargs.get('dataset', None)
self.data_path = kwargs.get('data_path', None)
self.uri = kwargs.get('uri', None)
self.type = kwargs.get('type', None)
class ExecutionDataPath(msrest.serialization.Model):
"""ExecutionDataPath.
:ivar datastore_name:
:vartype datastore_name: str
:ivar relative_path:
:vartype relative_path: str
"""
_attribute_map = {
'datastore_name': {'key': 'datastoreName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword datastore_name:
:paramtype datastore_name: str
:keyword relative_path:
:paramtype relative_path: str
"""
super(ExecutionDataPath, self).__init__(**kwargs)
self.datastore_name = kwargs.get('datastore_name', None)
self.relative_path = kwargs.get('relative_path', None)
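
# Example (hand-written sketch; the datastore name, relative path, and `type`
# string are hypothetical; lost on regeneration). An ExecutionDataLocation
# carries a `type` string alongside one of `dataset`, `data_path`, or `uri`;
# here it wraps a datastore-relative path:
#
#     location = ExecutionDataLocation(
#         type="DataPath",
#         data_path=ExecutionDataPath(
#             datastore_name="workspaceblobstore",
#             relative_path="inputs/data.jsonl",
#         ),
#     )
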
class ExecutionGlobsOptions(msrest.serialization.Model):
"""ExecutionGlobsOptions.
:ivar glob_patterns:
:vartype glob_patterns: list[str]
"""
_attribute_map = {
'glob_patterns': {'key': 'globPatterns', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword glob_patterns:
:paramtype glob_patterns: list[str]
"""
super(ExecutionGlobsOptions, self).__init__(**kwargs)
self.glob_patterns = kwargs.get('glob_patterns', None)
class ExperimentComputeMetaInfo(msrest.serialization.Model):
"""ExperimentComputeMetaInfo.
:ivar current_node_count:
:vartype current_node_count: int
:ivar target_node_count:
:vartype target_node_count: int
:ivar max_node_count:
:vartype max_node_count: int
:ivar min_node_count:
:vartype min_node_count: int
:ivar idle_node_count:
:vartype idle_node_count: int
:ivar running_node_count:
:vartype running_node_count: int
:ivar preparing_node_count:
:vartype preparing_node_count: int
:ivar unusable_node_count:
:vartype unusable_node_count: int
:ivar leaving_node_count:
:vartype leaving_node_count: int
:ivar preempted_node_count:
:vartype preempted_node_count: int
:ivar vm_size:
:vartype vm_size: str
:ivar location:
:vartype location: str
:ivar provisioning_state:
:vartype provisioning_state: str
:ivar state:
:vartype state: str
:ivar os_type:
:vartype os_type: str
:ivar id:
:vartype id: str
:ivar name:
:vartype name: str
:ivar created_by_studio:
:vartype created_by_studio: bool
:ivar is_gpu_type:
:vartype is_gpu_type: bool
:ivar resource_id:
:vartype resource_id: str
:ivar compute_type:
:vartype compute_type: str
"""
_attribute_map = {
'current_node_count': {'key': 'currentNodeCount', 'type': 'int'},
'target_node_count': {'key': 'targetNodeCount', 'type': 'int'},
'max_node_count': {'key': 'maxNodeCount', 'type': 'int'},
'min_node_count': {'key': 'minNodeCount', 'type': 'int'},
'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'},
'running_node_count': {'key': 'runningNodeCount', 'type': 'int'},
'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'},
'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'},
'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'},
'preempted_node_count': {'key': 'preemptedNodeCount', 'type': 'int'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'state': {'key': 'state', 'type': 'str'},
'os_type': {'key': 'osType', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'created_by_studio': {'key': 'createdByStudio', 'type': 'bool'},
'is_gpu_type': {'key': 'isGpuType', 'type': 'bool'},
'resource_id': {'key': 'resourceId', 'type': 'str'},
'compute_type': {'key': 'computeType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword current_node_count:
:paramtype current_node_count: int
:keyword target_node_count:
:paramtype target_node_count: int
:keyword max_node_count:
:paramtype max_node_count: int
:keyword min_node_count:
:paramtype min_node_count: int
:keyword idle_node_count:
:paramtype idle_node_count: int
:keyword running_node_count:
:paramtype running_node_count: int
:keyword preparing_node_count:
:paramtype preparing_node_count: int
:keyword unusable_node_count:
:paramtype unusable_node_count: int
:keyword leaving_node_count:
:paramtype leaving_node_count: int
:keyword preempted_node_count:
:paramtype preempted_node_count: int
:keyword vm_size:
:paramtype vm_size: str
:keyword location:
:paramtype location: str
:keyword provisioning_state:
:paramtype provisioning_state: str
:keyword state:
:paramtype state: str
:keyword os_type:
:paramtype os_type: str
:keyword id:
:paramtype id: str
:keyword name:
:paramtype name: str
:keyword created_by_studio:
:paramtype created_by_studio: bool
:keyword is_gpu_type:
:paramtype is_gpu_type: bool
:keyword resource_id:
:paramtype resource_id: str
:keyword compute_type:
:paramtype compute_type: str
"""
super(ExperimentComputeMetaInfo, self).__init__(**kwargs)
self.current_node_count = kwargs.get('current_node_count', None)
self.target_node_count = kwargs.get('target_node_count', None)
self.max_node_count = kwargs.get('max_node_count', None)
self.min_node_count = kwargs.get('min_node_count', None)
self.idle_node_count = kwargs.get('idle_node_count', None)
self.running_node_count = kwargs.get('running_node_count', None)
self.preparing_node_count = kwargs.get('preparing_node_count', None)
self.unusable_node_count = kwargs.get('unusable_node_count', None)
self.leaving_node_count = kwargs.get('leaving_node_count', None)
self.preempted_node_count = kwargs.get('preempted_node_count', None)
self.vm_size = kwargs.get('vm_size', None)
self.location = kwargs.get('location', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
self.state = kwargs.get('state', None)
self.os_type = kwargs.get('os_type', None)
self.id = kwargs.get('id', None)
self.name = kwargs.get('name', None)
self.created_by_studio = kwargs.get('created_by_studio', None)
self.is_gpu_type = kwargs.get('is_gpu_type', None)
self.resource_id = kwargs.get('resource_id', None)
self.compute_type = kwargs.get('compute_type', None)
class ExperimentInfo(msrest.serialization.Model):
"""ExperimentInfo.
:ivar experiment_name:
:vartype experiment_name: str
:ivar experiment_id:
:vartype experiment_id: str
"""
_attribute_map = {
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword experiment_name:
:paramtype experiment_name: str
:keyword experiment_id:
:paramtype experiment_id: str
"""
super(ExperimentInfo, self).__init__(**kwargs)
self.experiment_name = kwargs.get('experiment_name', None)
self.experiment_id = kwargs.get('experiment_id', None)
class ExportComponentMetaInfo(msrest.serialization.Model):
"""ExportComponentMetaInfo.
:ivar module_entity:
:vartype module_entity: ~flow.models.ModuleEntity
:ivar module_version:
:vartype module_version: str
:ivar is_anonymous:
:vartype is_anonymous: bool
"""
_attribute_map = {
'module_entity': {'key': 'moduleEntity', 'type': 'ModuleEntity'},
'module_version': {'key': 'moduleVersion', 'type': 'str'},
'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword module_entity:
:paramtype module_entity: ~flow.models.ModuleEntity
:keyword module_version:
:paramtype module_version: str
:keyword is_anonymous:
:paramtype is_anonymous: bool
"""
super(ExportComponentMetaInfo, self).__init__(**kwargs)
self.module_entity = kwargs.get('module_entity', None)
self.module_version = kwargs.get('module_version', None)
self.is_anonymous = kwargs.get('is_anonymous', None)
class ExportDataTask(msrest.serialization.Model):
"""ExportDataTask.
:ivar data_transfer_sink:
:vartype data_transfer_sink: ~flow.models.DataTransferSink
"""
_attribute_map = {
'data_transfer_sink': {'key': 'DataTransferSink', 'type': 'DataTransferSink'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_transfer_sink:
:paramtype data_transfer_sink: ~flow.models.DataTransferSink
"""
super(ExportDataTask, self).__init__(**kwargs)
self.data_transfer_sink = kwargs.get('data_transfer_sink', None)
class FeaturizationSettings(msrest.serialization.Model):
"""FeaturizationSettings.
:ivar mode: Possible values include: "Auto", "Custom", "Off".
:vartype mode: str or ~flow.models.FeaturizationMode
:ivar blocked_transformers:
:vartype blocked_transformers: list[str]
:ivar column_purposes: Dictionary of :code:`<string>`.
:vartype column_purposes: dict[str, str]
:ivar drop_columns:
:vartype drop_columns: list[str]
    :ivar transformer_params: Dictionary of :code:`<ColumnTransformer>` lists, keyed by column
     name.
:vartype transformer_params: dict[str, list[~flow.models.ColumnTransformer]]
:ivar dataset_language:
:vartype dataset_language: str
:ivar enable_dnn_featurization:
:vartype enable_dnn_featurization: bool
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'blocked_transformers': {'key': 'blockedTransformers', 'type': '[str]'},
'column_purposes': {'key': 'columnPurposes', 'type': '{str}'},
'drop_columns': {'key': 'dropColumns', 'type': '[str]'},
'transformer_params': {'key': 'transformerParams', 'type': '{[ColumnTransformer]}'},
'dataset_language': {'key': 'datasetLanguage', 'type': 'str'},
'enable_dnn_featurization': {'key': 'enableDnnFeaturization', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom", "Off".
:paramtype mode: str or ~flow.models.FeaturizationMode
:keyword blocked_transformers:
:paramtype blocked_transformers: list[str]
:keyword column_purposes: Dictionary of :code:`<string>`.
:paramtype column_purposes: dict[str, str]
:keyword drop_columns:
:paramtype drop_columns: list[str]
    :keyword transformer_params: Dictionary of :code:`<ColumnTransformer>` lists, keyed by
     column name.
:paramtype transformer_params: dict[str, list[~flow.models.ColumnTransformer]]
:keyword dataset_language:
:paramtype dataset_language: str
:keyword enable_dnn_featurization:
:paramtype enable_dnn_featurization: bool
"""
super(FeaturizationSettings, self).__init__(**kwargs)
self.mode = kwargs.get('mode', None)
self.blocked_transformers = kwargs.get('blocked_transformers', None)
self.column_purposes = kwargs.get('column_purposes', None)
self.drop_columns = kwargs.get('drop_columns', None)
self.transformer_params = kwargs.get('transformer_params', None)
self.dataset_language = kwargs.get('dataset_language', None)
self.enable_dnn_featurization = kwargs.get('enable_dnn_featurization', None)
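
# Example (hand-written; column names and transformer names are hypothetical;
# lost on regeneration). `mode` takes one of the documented string values
# ("Auto", "Custom", "Off"):
#
#     featurization = FeaturizationSettings(
#         mode="Custom",
#         blocked_transformers=["LabelEncoder"],
#         column_purposes={"age": "Numeric"},
#         drop_columns=["row_id"],
#         enable_dnn_featurization=False,
#     )
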
class FeedDto(msrest.serialization.Model):
"""FeedDto.
:ivar name:
:vartype name: str
:ivar display_name:
:vartype display_name: str
:ivar description:
:vartype description: str
:ivar sharing_scopes:
:vartype sharing_scopes: list[~flow.models.SharingScope]
:ivar supported_asset_types:
:vartype supported_asset_types: ~flow.models.FeedDtoSupportedAssetTypes
:ivar regional_workspace_storage: This is a dictionary.
:vartype regional_workspace_storage: dict[str, list[str]]
:ivar intellectual_property_publisher:
:vartype intellectual_property_publisher: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'sharing_scopes': {'key': 'sharingScopes', 'type': '[SharingScope]'},
'supported_asset_types': {'key': 'supportedAssetTypes', 'type': 'FeedDtoSupportedAssetTypes'},
'regional_workspace_storage': {'key': 'regionalWorkspaceStorage', 'type': '{[str]}'},
'intellectual_property_publisher': {'key': 'intellectualPropertyPublisher', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword display_name:
:paramtype display_name: str
:keyword description:
:paramtype description: str
:keyword sharing_scopes:
:paramtype sharing_scopes: list[~flow.models.SharingScope]
:keyword supported_asset_types:
:paramtype supported_asset_types: ~flow.models.FeedDtoSupportedAssetTypes
:keyword regional_workspace_storage: This is a dictionary.
:paramtype regional_workspace_storage: dict[str, list[str]]
:keyword intellectual_property_publisher:
:paramtype intellectual_property_publisher: str
"""
super(FeedDto, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.display_name = kwargs.get('display_name', None)
self.description = kwargs.get('description', None)
self.sharing_scopes = kwargs.get('sharing_scopes', None)
self.supported_asset_types = kwargs.get('supported_asset_types', None)
self.regional_workspace_storage = kwargs.get('regional_workspace_storage', None)
self.intellectual_property_publisher = kwargs.get('intellectual_property_publisher', None)
class FeedDtoSupportedAssetTypes(msrest.serialization.Model):
"""FeedDtoSupportedAssetTypes.
:ivar component:
:vartype component: ~flow.models.AssetTypeMetaInfo
:ivar model:
:vartype model: ~flow.models.AssetTypeMetaInfo
:ivar environment:
:vartype environment: ~flow.models.AssetTypeMetaInfo
:ivar dataset:
:vartype dataset: ~flow.models.AssetTypeMetaInfo
:ivar data_store:
:vartype data_store: ~flow.models.AssetTypeMetaInfo
:ivar sample_graph:
:vartype sample_graph: ~flow.models.AssetTypeMetaInfo
:ivar flow_tool:
:vartype flow_tool: ~flow.models.AssetTypeMetaInfo
:ivar flow_tool_setting:
:vartype flow_tool_setting: ~flow.models.AssetTypeMetaInfo
:ivar flow_connection:
:vartype flow_connection: ~flow.models.AssetTypeMetaInfo
:ivar flow_sample:
:vartype flow_sample: ~flow.models.AssetTypeMetaInfo
:ivar flow_runtime_spec:
:vartype flow_runtime_spec: ~flow.models.AssetTypeMetaInfo
"""
_attribute_map = {
'component': {'key': 'Component', 'type': 'AssetTypeMetaInfo'},
'model': {'key': 'Model', 'type': 'AssetTypeMetaInfo'},
'environment': {'key': 'Environment', 'type': 'AssetTypeMetaInfo'},
'dataset': {'key': 'Dataset', 'type': 'AssetTypeMetaInfo'},
'data_store': {'key': 'DataStore', 'type': 'AssetTypeMetaInfo'},
'sample_graph': {'key': 'SampleGraph', 'type': 'AssetTypeMetaInfo'},
'flow_tool': {'key': 'FlowTool', 'type': 'AssetTypeMetaInfo'},
'flow_tool_setting': {'key': 'FlowToolSetting', 'type': 'AssetTypeMetaInfo'},
'flow_connection': {'key': 'FlowConnection', 'type': 'AssetTypeMetaInfo'},
'flow_sample': {'key': 'FlowSample', 'type': 'AssetTypeMetaInfo'},
'flow_runtime_spec': {'key': 'FlowRuntimeSpec', 'type': 'AssetTypeMetaInfo'},
}
def __init__(
self,
**kwargs
):
"""
:keyword component:
:paramtype component: ~flow.models.AssetTypeMetaInfo
:keyword model:
:paramtype model: ~flow.models.AssetTypeMetaInfo
:keyword environment:
:paramtype environment: ~flow.models.AssetTypeMetaInfo
:keyword dataset:
:paramtype dataset: ~flow.models.AssetTypeMetaInfo
:keyword data_store:
:paramtype data_store: ~flow.models.AssetTypeMetaInfo
:keyword sample_graph:
:paramtype sample_graph: ~flow.models.AssetTypeMetaInfo
:keyword flow_tool:
:paramtype flow_tool: ~flow.models.AssetTypeMetaInfo
:keyword flow_tool_setting:
:paramtype flow_tool_setting: ~flow.models.AssetTypeMetaInfo
:keyword flow_connection:
:paramtype flow_connection: ~flow.models.AssetTypeMetaInfo
:keyword flow_sample:
:paramtype flow_sample: ~flow.models.AssetTypeMetaInfo
:keyword flow_runtime_spec:
:paramtype flow_runtime_spec: ~flow.models.AssetTypeMetaInfo
"""
super(FeedDtoSupportedAssetTypes, self).__init__(**kwargs)
self.component = kwargs.get('component', None)
self.model = kwargs.get('model', None)
self.environment = kwargs.get('environment', None)
self.dataset = kwargs.get('dataset', None)
self.data_store = kwargs.get('data_store', None)
self.sample_graph = kwargs.get('sample_graph', None)
self.flow_tool = kwargs.get('flow_tool', None)
self.flow_tool_setting = kwargs.get('flow_tool_setting', None)
self.flow_connection = kwargs.get('flow_connection', None)
self.flow_sample = kwargs.get('flow_sample', None)
self.flow_runtime_spec = kwargs.get('flow_runtime_spec', None)
class FileSystem(msrest.serialization.Model):
"""FileSystem.
:ivar connection:
:vartype connection: str
:ivar path:
:vartype path: str
"""
_attribute_map = {
'connection': {'key': 'connection', 'type': 'str'},
'path': {'key': 'path', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword connection:
:paramtype connection: str
:keyword path:
:paramtype path: str
"""
super(FileSystem, self).__init__(**kwargs)
self.connection = kwargs.get('connection', None)
self.path = kwargs.get('path', None)
class Flow(msrest.serialization.Model):
"""Flow.
:ivar source_resource_id:
:vartype source_resource_id: str
:ivar flow_graph:
:vartype flow_graph: ~flow.models.FlowGraph
:ivar node_variants: This is a dictionary.
:vartype node_variants: dict[str, ~flow.models.NodeVariant]
:ivar flow_graph_layout:
:vartype flow_graph_layout: ~flow.models.FlowGraphLayout
:ivar bulk_test_data: This is a dictionary.
:vartype bulk_test_data: dict[str, str]
:ivar evaluation_flows: This is a dictionary.
:vartype evaluation_flows: dict[str, ~flow.models.FlowGraphReference]
"""
_attribute_map = {
'source_resource_id': {'key': 'sourceResourceId', 'type': 'str'},
'flow_graph': {'key': 'flowGraph', 'type': 'FlowGraph'},
'node_variants': {'key': 'nodeVariants', 'type': '{NodeVariant}'},
'flow_graph_layout': {'key': 'flowGraphLayout', 'type': 'FlowGraphLayout'},
'bulk_test_data': {'key': 'bulkTestData', 'type': '{str}'},
'evaluation_flows': {'key': 'evaluationFlows', 'type': '{FlowGraphReference}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword source_resource_id:
:paramtype source_resource_id: str
:keyword flow_graph:
:paramtype flow_graph: ~flow.models.FlowGraph
:keyword node_variants: This is a dictionary.
:paramtype node_variants: dict[str, ~flow.models.NodeVariant]
:keyword flow_graph_layout:
:paramtype flow_graph_layout: ~flow.models.FlowGraphLayout
:keyword bulk_test_data: This is a dictionary.
:paramtype bulk_test_data: dict[str, str]
:keyword evaluation_flows: This is a dictionary.
:paramtype evaluation_flows: dict[str, ~flow.models.FlowGraphReference]
"""
super(Flow, self).__init__(**kwargs)
self.source_resource_id = kwargs.get('source_resource_id', None)
self.flow_graph = kwargs.get('flow_graph', None)
self.node_variants = kwargs.get('node_variants', None)
self.flow_graph_layout = kwargs.get('flow_graph_layout', None)
self.bulk_test_data = kwargs.get('bulk_test_data', None)
self.evaluation_flows = kwargs.get('evaluation_flows', None)
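
# Example (hand-written sketch; lost on regeneration). A Flow bundles a graph
# with optional node variants, layout, and evaluation flows; `FlowGraph` is
# defined later in this module:
#
#     flow = Flow(
#         flow_graph=FlowGraph(nodes=[], tools=[], codes={}, inputs={}, outputs={}),
#         bulk_test_data={},
#     )
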
class FlowAnnotations(msrest.serialization.Model):
"""FlowAnnotations.
:ivar flow_name:
:vartype flow_name: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
:ivar owner:
:vartype owner: ~flow.models.SchemaContractsCreatedBy
:ivar is_archived:
:vartype is_archived: bool
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar archived:
:vartype archived: bool
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
"""
_attribute_map = {
'flow_name': {'key': 'flowName', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'archived': {'key': 'archived', 'type': 'bool'},
'tags': {'key': 'tags', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_name:
:paramtype flow_name: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
:keyword owner:
:paramtype owner: ~flow.models.SchemaContractsCreatedBy
:keyword is_archived:
:paramtype is_archived: bool
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword archived:
:paramtype archived: bool
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
"""
super(FlowAnnotations, self).__init__(**kwargs)
self.flow_name = kwargs.get('flow_name', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
self.owner = kwargs.get('owner', None)
self.is_archived = kwargs.get('is_archived', None)
self.vm_size = kwargs.get('vm_size', None)
self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.archived = kwargs.get('archived', None)
self.tags = kwargs.get('tags', None)
class FlowBaseDto(msrest.serialization.Model):
"""FlowBaseDto.
:ivar flow_id:
:vartype flow_id: str
:ivar flow_name:
:vartype flow_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType
:ivar experiment_id:
:vartype experiment_id: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
:ivar owner:
:vartype owner: ~flow.models.SchemaContractsCreatedBy
:ivar flow_resource_id:
:vartype flow_resource_id: str
:ivar is_archived:
:vartype is_archived: bool
:ivar flow_definition_file_path:
:vartype flow_definition_file_path: str
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar identity:
:vartype identity: str
"""
_attribute_map = {
'flow_id': {'key': 'flowId', 'type': 'str'},
'flow_name': {'key': 'flowName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'flow_type': {'key': 'flowType', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'},
'flow_resource_id': {'key': 'flowResourceId', 'type': 'str'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'identity': {'key': 'identity', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_id:
:paramtype flow_id: str
:keyword flow_name:
:paramtype flow_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:paramtype flow_type: str or ~flow.models.FlowType
:keyword experiment_id:
:paramtype experiment_id: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
:keyword owner:
:paramtype owner: ~flow.models.SchemaContractsCreatedBy
:keyword flow_resource_id:
:paramtype flow_resource_id: str
:keyword is_archived:
:paramtype is_archived: bool
:keyword flow_definition_file_path:
:paramtype flow_definition_file_path: str
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword identity:
:paramtype identity: str
"""
super(FlowBaseDto, self).__init__(**kwargs)
self.flow_id = kwargs.get('flow_id', None)
self.flow_name = kwargs.get('flow_name', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.flow_type = kwargs.get('flow_type', None)
self.experiment_id = kwargs.get('experiment_id', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
self.owner = kwargs.get('owner', None)
self.flow_resource_id = kwargs.get('flow_resource_id', None)
self.is_archived = kwargs.get('is_archived', None)
self.flow_definition_file_path = kwargs.get('flow_definition_file_path', None)
self.vm_size = kwargs.get('vm_size', None)
self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
self.identity = kwargs.get('identity', None)
class FlowDto(msrest.serialization.Model):
"""FlowDto.
:ivar timestamp:
:vartype timestamp: ~datetime.datetime
:ivar e_tag: Any object.
:vartype e_tag: any
:ivar flow:
:vartype flow: ~flow.models.Flow
:ivar flow_run_settings:
:vartype flow_run_settings: ~flow.models.FlowRunSettings
:ivar flow_run_result:
:vartype flow_run_result: ~flow.models.FlowRunResult
:ivar flow_test_mode: Possible values include: "Sync", "Async".
:vartype flow_test_mode: str or ~flow.models.FlowTestMode
:ivar flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
:vartype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
:ivar studio_portal_endpoint:
:vartype studio_portal_endpoint: str
:ivar flow_id:
:vartype flow_id: str
:ivar flow_name:
:vartype flow_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType
:ivar experiment_id:
:vartype experiment_id: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
:ivar owner:
:vartype owner: ~flow.models.SchemaContractsCreatedBy
:ivar flow_resource_id:
:vartype flow_resource_id: str
:ivar is_archived:
:vartype is_archived: bool
:ivar flow_definition_file_path:
:vartype flow_definition_file_path: str
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar identity:
:vartype identity: str
"""
_attribute_map = {
'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
'e_tag': {'key': 'eTag', 'type': 'object'},
'flow': {'key': 'flow', 'type': 'Flow'},
'flow_run_settings': {'key': 'flowRunSettings', 'type': 'FlowRunSettings'},
'flow_run_result': {'key': 'flowRunResult', 'type': 'FlowRunResult'},
'flow_test_mode': {'key': 'flowTestMode', 'type': 'str'},
'flow_test_infos': {'key': 'flowTestInfos', 'type': '{FlowTestInfo}'},
'studio_portal_endpoint': {'key': 'studioPortalEndpoint', 'type': 'str'},
'flow_id': {'key': 'flowId', 'type': 'str'},
'flow_name': {'key': 'flowName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'flow_type': {'key': 'flowType', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'},
'flow_resource_id': {'key': 'flowResourceId', 'type': 'str'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'identity': {'key': 'identity', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword timestamp:
:paramtype timestamp: ~datetime.datetime
:keyword e_tag: Any object.
:paramtype e_tag: any
:keyword flow:
:paramtype flow: ~flow.models.Flow
:keyword flow_run_settings:
:paramtype flow_run_settings: ~flow.models.FlowRunSettings
:keyword flow_run_result:
:paramtype flow_run_result: ~flow.models.FlowRunResult
:keyword flow_test_mode: Possible values include: "Sync", "Async".
:paramtype flow_test_mode: str or ~flow.models.FlowTestMode
:keyword flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
:paramtype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
:keyword studio_portal_endpoint:
:paramtype studio_portal_endpoint: str
:keyword flow_id:
:paramtype flow_id: str
:keyword flow_name:
:paramtype flow_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:paramtype flow_type: str or ~flow.models.FlowType
:keyword experiment_id:
:paramtype experiment_id: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
:keyword owner:
:paramtype owner: ~flow.models.SchemaContractsCreatedBy
:keyword flow_resource_id:
:paramtype flow_resource_id: str
:keyword is_archived:
:paramtype is_archived: bool
:keyword flow_definition_file_path:
:paramtype flow_definition_file_path: str
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword identity:
:paramtype identity: str
"""
super(FlowDto, self).__init__(**kwargs)
self.timestamp = kwargs.get('timestamp', None)
self.e_tag = kwargs.get('e_tag', None)
self.flow = kwargs.get('flow', None)
self.flow_run_settings = kwargs.get('flow_run_settings', None)
self.flow_run_result = kwargs.get('flow_run_result', None)
self.flow_test_mode = kwargs.get('flow_test_mode', None)
self.flow_test_infos = kwargs.get('flow_test_infos', None)
self.studio_portal_endpoint = kwargs.get('studio_portal_endpoint', None)
self.flow_id = kwargs.get('flow_id', None)
self.flow_name = kwargs.get('flow_name', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.flow_type = kwargs.get('flow_type', None)
self.experiment_id = kwargs.get('experiment_id', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
self.owner = kwargs.get('owner', None)
self.flow_resource_id = kwargs.get('flow_resource_id', None)
self.is_archived = kwargs.get('is_archived', None)
self.flow_definition_file_path = kwargs.get('flow_definition_file_path', None)
self.vm_size = kwargs.get('vm_size', None)
self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
self.identity = kwargs.get('identity', None)
class FlowEnvironment(msrest.serialization.Model):
"""FlowEnvironment.
:ivar image:
:vartype image: str
:ivar python_requirements_txt:
:vartype python_requirements_txt: str
"""
_attribute_map = {
'image': {'key': 'image', 'type': 'str'},
'python_requirements_txt': {'key': 'python_requirements_txt', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword image:
:paramtype image: str
:keyword python_requirements_txt:
:paramtype python_requirements_txt: str
"""
super(FlowEnvironment, self).__init__(**kwargs)
self.image = kwargs.get('image', None)
self.python_requirements_txt = kwargs.get('python_requirements_txt', None)
class FlowFeature(msrest.serialization.Model):
"""FlowFeature.
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar state:
:vartype state: ~flow.models.FlowFeatureState
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'state': {'key': 'state', 'type': 'FlowFeatureState'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword state:
:paramtype state: ~flow.models.FlowFeatureState
"""
super(FlowFeature, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.state = kwargs.get('state', None)
class FlowFeatureState(msrest.serialization.Model):
"""FlowFeatureState.
:ivar runtime: Possible values include: "Ready", "E2ETest".
:vartype runtime: str or ~flow.models.FlowFeatureStateEnum
:ivar executor: Possible values include: "Ready", "E2ETest".
:vartype executor: str or ~flow.models.FlowFeatureStateEnum
:ivar pfs: Possible values include: "Ready", "E2ETest".
:vartype pfs: str or ~flow.models.FlowFeatureStateEnum
"""
_attribute_map = {
'runtime': {'key': 'Runtime', 'type': 'str'},
'executor': {'key': 'Executor', 'type': 'str'},
'pfs': {'key': 'PFS', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword runtime: Possible values include: "Ready", "E2ETest".
:paramtype runtime: str or ~flow.models.FlowFeatureStateEnum
:keyword executor: Possible values include: "Ready", "E2ETest".
:paramtype executor: str or ~flow.models.FlowFeatureStateEnum
:keyword pfs: Possible values include: "Ready", "E2ETest".
:paramtype pfs: str or ~flow.models.FlowFeatureStateEnum
"""
super(FlowFeatureState, self).__init__(**kwargs)
self.runtime = kwargs.get('runtime', None)
self.executor = kwargs.get('executor', None)
self.pfs = kwargs.get('pfs', None)
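
# Example (hand-written; lost on regeneration). Each field takes one of the
# documented FlowFeatureStateEnum strings:
#
#     state = FlowFeatureState(runtime="Ready", executor="Ready", pfs="E2ETest")
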
class FlowGraph(msrest.serialization.Model):
"""FlowGraph.
:ivar nodes:
:vartype nodes: list[~flow.models.Node]
:ivar tools:
:vartype tools: list[~flow.models.Tool]
:ivar codes: This is a dictionary.
:vartype codes: dict[str, str]
:ivar inputs: This is a dictionary.
:vartype inputs: dict[str, ~flow.models.FlowInputDefinition]
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, ~flow.models.FlowOutputDefinition]
"""
_attribute_map = {
'nodes': {'key': 'nodes', 'type': '[Node]'},
'tools': {'key': 'tools', 'type': '[Tool]'},
'codes': {'key': 'codes', 'type': '{str}'},
'inputs': {'key': 'inputs', 'type': '{FlowInputDefinition}'},
'outputs': {'key': 'outputs', 'type': '{FlowOutputDefinition}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword nodes:
:paramtype nodes: list[~flow.models.Node]
:keyword tools:
:paramtype tools: list[~flow.models.Tool]
:keyword codes: This is a dictionary.
:paramtype codes: dict[str, str]
:keyword inputs: This is a dictionary.
:paramtype inputs: dict[str, ~flow.models.FlowInputDefinition]
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, ~flow.models.FlowOutputDefinition]
"""
super(FlowGraph, self).__init__(**kwargs)
self.nodes = kwargs.get('nodes', None)
self.tools = kwargs.get('tools', None)
self.codes = kwargs.get('codes', None)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
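
# Illustrative usage (hypothetical content): a minimal graph. 'codes' maps file
# names to source text; 'inputs' uses FlowInputDefinition, which is defined
# later in this module. None-valued fields ('nodes', 'tools', 'outputs') are
# omitted from the serialized payload:
#
#     graph = FlowGraph(
#         codes={'hello.py': 'print("hi")'},
#         inputs={'question': FlowInputDefinition(name='question', type='string')},
#     )
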
class FlowGraphAnnotationNode(msrest.serialization.Model):
"""FlowGraphAnnotationNode.
:ivar id:
:vartype id: str
:ivar content:
:vartype content: str
:ivar mentioned_node_names:
:vartype mentioned_node_names: list[str]
:ivar structured_content:
:vartype structured_content: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'content': {'key': 'content', 'type': 'str'},
'mentioned_node_names': {'key': 'mentionedNodeNames', 'type': '[str]'},
'structured_content': {'key': 'structuredContent', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword content:
:paramtype content: str
:keyword mentioned_node_names:
:paramtype mentioned_node_names: list[str]
:keyword structured_content:
:paramtype structured_content: str
"""
super(FlowGraphAnnotationNode, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.content = kwargs.get('content', None)
self.mentioned_node_names = kwargs.get('mentioned_node_names', None)
self.structured_content = kwargs.get('structured_content', None)
class FlowGraphLayout(msrest.serialization.Model):
"""FlowGraphLayout.
:ivar node_layouts: This is a dictionary.
:vartype node_layouts: dict[str, ~flow.models.FlowNodeLayout]
:ivar extended_data:
:vartype extended_data: str
:ivar annotation_nodes:
:vartype annotation_nodes: list[~flow.models.FlowGraphAnnotationNode]
:ivar orientation: Possible values include: "Horizontal", "Vertical".
:vartype orientation: str or ~flow.models.Orientation
"""
_attribute_map = {
'node_layouts': {'key': 'nodeLayouts', 'type': '{FlowNodeLayout}'},
'extended_data': {'key': 'extendedData', 'type': 'str'},
'annotation_nodes': {'key': 'annotationNodes', 'type': '[FlowGraphAnnotationNode]'},
'orientation': {'key': 'orientation', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword node_layouts: This is a dictionary.
:paramtype node_layouts: dict[str, ~flow.models.FlowNodeLayout]
:keyword extended_data:
:paramtype extended_data: str
:keyword annotation_nodes:
:paramtype annotation_nodes: list[~flow.models.FlowGraphAnnotationNode]
:keyword orientation: Possible values include: "Horizontal", "Vertical".
:paramtype orientation: str or ~flow.models.Orientation
"""
super(FlowGraphLayout, self).__init__(**kwargs)
self.node_layouts = kwargs.get('node_layouts', None)
self.extended_data = kwargs.get('extended_data', None)
self.annotation_nodes = kwargs.get('annotation_nodes', None)
self.orientation = kwargs.get('orientation', None)
class FlowGraphReference(msrest.serialization.Model):
"""FlowGraphReference.
:ivar flow_graph:
:vartype flow_graph: ~flow.models.FlowGraph
:ivar reference_resource_id:
:vartype reference_resource_id: str
:ivar variant:
:vartype variant: ~flow.models.VariantIdentifier
"""
_attribute_map = {
'flow_graph': {'key': 'flowGraph', 'type': 'FlowGraph'},
'reference_resource_id': {'key': 'referenceResourceId', 'type': 'str'},
'variant': {'key': 'variant', 'type': 'VariantIdentifier'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_graph:
:paramtype flow_graph: ~flow.models.FlowGraph
:keyword reference_resource_id:
:paramtype reference_resource_id: str
:keyword variant:
:paramtype variant: ~flow.models.VariantIdentifier
"""
super(FlowGraphReference, self).__init__(**kwargs)
self.flow_graph = kwargs.get('flow_graph', None)
self.reference_resource_id = kwargs.get('reference_resource_id', None)
self.variant = kwargs.get('variant', None)
class FlowIndexEntity(msrest.serialization.Model):
"""FlowIndexEntity.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar schema_id:
:vartype schema_id: str
:ivar entity_id:
:vartype entity_id: str
:ivar kind: Possible values include: "Invalid", "LineageRoot", "Versioned", "Unversioned".
:vartype kind: str or ~flow.models.EntityKind
:ivar annotations:
:vartype annotations: ~flow.models.FlowAnnotations
:ivar properties:
:vartype properties: ~flow.models.FlowProperties
:ivar internal: Any object.
:vartype internal: any
:ivar update_sequence:
:vartype update_sequence: long
:ivar type:
:vartype type: str
:ivar version:
:vartype version: str
:ivar entity_container_id:
:vartype entity_container_id: str
:ivar entity_object_id:
:vartype entity_object_id: str
:ivar resource_type:
:vartype resource_type: str
:ivar relationships:
:vartype relationships: list[~flow.models.Relationship]
:ivar asset_id:
:vartype asset_id: str
"""
_validation = {
'version': {'readonly': True},
'entity_container_id': {'readonly': True},
'entity_object_id': {'readonly': True},
'resource_type': {'readonly': True},
}
_attribute_map = {
'schema_id': {'key': 'schemaId', 'type': 'str'},
'entity_id': {'key': 'entityId', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'annotations': {'key': 'annotations', 'type': 'FlowAnnotations'},
'properties': {'key': 'properties', 'type': 'FlowProperties'},
'internal': {'key': 'internal', 'type': 'object'},
'update_sequence': {'key': 'updateSequence', 'type': 'long'},
'type': {'key': 'type', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'entity_container_id': {'key': 'entityContainerId', 'type': 'str'},
'entity_object_id': {'key': 'entityObjectId', 'type': 'str'},
'resource_type': {'key': 'resourceType', 'type': 'str'},
'relationships': {'key': 'relationships', 'type': '[Relationship]'},
'asset_id': {'key': 'assetId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword schema_id:
:paramtype schema_id: str
:keyword entity_id:
:paramtype entity_id: str
:keyword kind: Possible values include: "Invalid", "LineageRoot", "Versioned", "Unversioned".
:paramtype kind: str or ~flow.models.EntityKind
:keyword annotations:
:paramtype annotations: ~flow.models.FlowAnnotations
:keyword properties:
:paramtype properties: ~flow.models.FlowProperties
:keyword internal: Any object.
:paramtype internal: any
:keyword update_sequence:
:paramtype update_sequence: long
:keyword type:
:paramtype type: str
:keyword relationships:
:paramtype relationships: list[~flow.models.Relationship]
:keyword asset_id:
:paramtype asset_id: str
"""
super(FlowIndexEntity, self).__init__(**kwargs)
self.schema_id = kwargs.get('schema_id', None)
self.entity_id = kwargs.get('entity_id', None)
self.kind = kwargs.get('kind', None)
self.annotations = kwargs.get('annotations', None)
self.properties = kwargs.get('properties', None)
self.internal = kwargs.get('internal', None)
self.update_sequence = kwargs.get('update_sequence', None)
self.type = kwargs.get('type', None)
self.version = None
self.entity_container_id = None
self.entity_object_id = None
self.resource_type = None
self.relationships = kwargs.get('relationships', None)
self.asset_id = kwargs.get('asset_id', None)
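
# Behavior note (see _validation above): 'version', 'entity_container_id',
# 'entity_object_id' and 'resource_type' are readonly, so __init__ resets them
# to None; they are only populated when deserializing a service response:
#
#     entity = FlowIndexEntity(entity_id='abc', version='1')  # hypothetical values
#     entity.version  # -> None (the 'version' kwarg is ignored)
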
class FlowInputDefinition(msrest.serialization.Model):
"""FlowInputDefinition.
:ivar name:
:vartype name: str
:ivar type: Possible values include: "int", "double", "bool", "string", "secret",
"prompt_template", "object", "list", "BingConnection", "OpenAIConnection",
"AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection",
"AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection",
"SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection",
"function_list", "function_str", "FormRecognizerConnection", "file_path", "image",
"assistant_definition".
:vartype type: str or ~flow.models.ValueType
:ivar default: Anything.
:vartype default: any
:ivar description:
:vartype description: str
:ivar is_chat_input:
:vartype is_chat_input: bool
:ivar is_chat_history:
:vartype is_chat_history: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'default': {'key': 'default', 'type': 'object'},
'description': {'key': 'description', 'type': 'str'},
'is_chat_input': {'key': 'is_chat_input', 'type': 'bool'},
'is_chat_history': {'key': 'is_chat_history', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type: Possible values include: "int", "double", "bool", "string", "secret",
"prompt_template", "object", "list", "BingConnection", "OpenAIConnection",
"AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection",
"AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection",
"SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection",
"function_list", "function_str", "FormRecognizerConnection", "file_path", "image",
"assistant_definition".
:paramtype type: str or ~flow.models.ValueType
:keyword default: Anything.
:paramtype default: any
:keyword description:
:paramtype description: str
:keyword is_chat_input:
:paramtype is_chat_input: bool
:keyword is_chat_history:
:paramtype is_chat_history: bool
"""
super(FlowInputDefinition, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = kwargs.get('type', None)
self.default = kwargs.get('default', None)
self.description = kwargs.get('description', None)
self.is_chat_input = kwargs.get('is_chat_input', None)
self.is_chat_history = kwargs.get('is_chat_history', None)
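
# Illustrative usage (hypothetical names): chat flows typically mark one string
# input as the chat input and one list input as the chat history:
#
#     question = FlowInputDefinition(name='question', type='string', is_chat_input=True)
#     history = FlowInputDefinition(name='chat_history', type='list', is_chat_history=True)
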
class FlowNode(msrest.serialization.Model):
"""FlowNode.
:ivar name:
:vartype name: str
:ivar type: Possible values include: "llm", "python", "action", "prompt", "custom_llm",
"csharp", "typescript".
:vartype type: str or ~flow.models.ToolType
:ivar source:
:vartype source: ~flow.models.NodeSource
:ivar inputs: Dictionary of :code:`<any>`.
:vartype inputs: dict[str, any]
:ivar activate:
:vartype activate: ~flow.models.Activate
:ivar use_variants:
:vartype use_variants: bool
:ivar comment:
:vartype comment: str
:ivar api:
:vartype api: str
:ivar provider:
:vartype provider: str
:ivar connection:
:vartype connection: str
:ivar module:
:vartype module: str
:ivar aggregation:
:vartype aggregation: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'source': {'key': 'source', 'type': 'NodeSource'},
'inputs': {'key': 'inputs', 'type': '{object}'},
'activate': {'key': 'activate', 'type': 'Activate'},
'use_variants': {'key': 'use_variants', 'type': 'bool'},
'comment': {'key': 'comment', 'type': 'str'},
'api': {'key': 'api', 'type': 'str'},
'provider': {'key': 'provider', 'type': 'str'},
'connection': {'key': 'connection', 'type': 'str'},
'module': {'key': 'module', 'type': 'str'},
'aggregation': {'key': 'aggregation', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type: Possible values include: "llm", "python", "action", "prompt", "custom_llm",
"csharp", "typescript".
:paramtype type: str or ~flow.models.ToolType
:keyword source:
:paramtype source: ~flow.models.NodeSource
:keyword inputs: Dictionary of :code:`<any>`.
:paramtype inputs: dict[str, any]
:keyword activate:
:paramtype activate: ~flow.models.Activate
:keyword use_variants:
:paramtype use_variants: bool
:keyword comment:
:paramtype comment: str
:keyword api:
:paramtype api: str
:keyword provider:
:paramtype provider: str
:keyword connection:
:paramtype connection: str
:keyword module:
:paramtype module: str
:keyword aggregation:
:paramtype aggregation: bool
"""
super(FlowNode, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = kwargs.get('type', None)
self.source = kwargs.get('source', None)
self.inputs = kwargs.get('inputs', None)
self.activate = kwargs.get('activate', None)
self.use_variants = kwargs.get('use_variants', None)
self.comment = kwargs.get('comment', None)
self.api = kwargs.get('api', None)
self.provider = kwargs.get('provider', None)
self.connection = kwargs.get('connection', None)
self.module = kwargs.get('module', None)
self.aggregation = kwargs.get('aggregation', None)
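
# Illustrative usage (hypothetical values, including the '${...}' reference
# syntax): an LLM node bound to a connection; 'inputs' is a free-form dict and
# is serialized as-is:
#
#     node = FlowNode(
#         name='answer', type='llm', api='chat',
#         provider='AzureOpenAI', connection='my_aoai_connection',
#         inputs={'deployment_name': 'gpt-35-turbo', 'question': '${inputs.question}'},
#     )
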
class FlowNodeLayout(msrest.serialization.Model):
"""FlowNodeLayout.
:ivar x:
:vartype x: float
:ivar y:
:vartype y: float
:ivar width:
:vartype width: float
:ivar height:
:vartype height: float
:ivar index:
:vartype index: int
:ivar extended_data:
:vartype extended_data: str
"""
_attribute_map = {
'x': {'key': 'x', 'type': 'float'},
'y': {'key': 'y', 'type': 'float'},
'width': {'key': 'width', 'type': 'float'},
'height': {'key': 'height', 'type': 'float'},
'index': {'key': 'index', 'type': 'int'},
'extended_data': {'key': 'extendedData', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword x:
:paramtype x: float
:keyword y:
:paramtype y: float
:keyword width:
:paramtype width: float
:keyword height:
:paramtype height: float
:keyword index:
:paramtype index: int
:keyword extended_data:
:paramtype extended_data: str
"""
super(FlowNodeLayout, self).__init__(**kwargs)
self.x = kwargs.get('x', None)
self.y = kwargs.get('y', None)
self.width = kwargs.get('width', None)
self.height = kwargs.get('height', None)
self.index = kwargs.get('index', None)
self.extended_data = kwargs.get('extended_data', None)
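
# Illustrative usage: only 'extended_data' is renamed on the wire
# ('extendedData'); the other keys serialize under their short names, and
# None-valued fields are dropped:
#
#     layout = FlowNodeLayout(x=0.0, y=120.0, width=200.0, height=80.0, index=0)
#     layout.serialize()
#     # -> {'x': 0.0, 'y': 120.0, 'width': 200.0, 'height': 80.0, 'index': 0}
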
class FlowNodeVariant(msrest.serialization.Model):
"""FlowNodeVariant.
:ivar default_variant_id:
:vartype default_variant_id: str
:ivar variants: This is a dictionary.
:vartype variants: dict[str, ~flow.models.FlowVariantNode]
"""
_attribute_map = {
'default_variant_id': {'key': 'default_variant_id', 'type': 'str'},
'variants': {'key': 'variants', 'type': '{FlowVariantNode}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword default_variant_id:
:paramtype default_variant_id: str
:keyword variants: This is a dictionary.
:paramtype variants: dict[str, ~flow.models.FlowVariantNode]
"""
super(FlowNodeVariant, self).__init__(**kwargs)
self.default_variant_id = kwargs.get('default_variant_id', None)
self.variants = kwargs.get('variants', None)
class FlowOutputDefinition(msrest.serialization.Model):
"""FlowOutputDefinition.
:ivar name:
:vartype name: str
:ivar type: Possible values include: "int", "double", "bool", "string", "secret",
"prompt_template", "object", "list", "BingConnection", "OpenAIConnection",
"AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection",
"AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection",
"SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection",
"function_list", "function_str", "FormRecognizerConnection", "file_path", "image",
"assistant_definition".
:vartype type: str or ~flow.models.ValueType
:ivar description:
:vartype description: str
:ivar reference:
:vartype reference: str
:ivar evaluation_only:
:vartype evaluation_only: bool
:ivar is_chat_output:
:vartype is_chat_output: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'reference': {'key': 'reference', 'type': 'str'},
'evaluation_only': {'key': 'evaluation_only', 'type': 'bool'},
'is_chat_output': {'key': 'is_chat_output', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type: Possible values include: "int", "double", "bool", "string", "secret",
"prompt_template", "object", "list", "BingConnection", "OpenAIConnection",
"AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection",
"AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection",
"SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection",
"function_list", "function_str", "FormRecognizerConnection", "file_path", "image",
"assistant_definition".
:paramtype type: str or ~flow.models.ValueType
:keyword description:
:paramtype description: str
:keyword reference:
:paramtype reference: str
:keyword evaluation_only:
:paramtype evaluation_only: bool
:keyword is_chat_output:
:paramtype is_chat_output: bool
"""
super(FlowOutputDefinition, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = kwargs.get('type', None)
self.description = kwargs.get('description', None)
self.reference = kwargs.get('reference', None)
self.evaluation_only = kwargs.get('evaluation_only', None)
self.is_chat_output = kwargs.get('is_chat_output', None)
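
# Illustrative usage (hypothetical node name and reference syntax): 'reference'
# points the flow output at a node's output:
#
#     answer = FlowOutputDefinition(name='answer', type='string',
#                                   reference='${answer_node.output}',
#                                   is_chat_output=True)
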
class FlowProperties(msrest.serialization.Model):
"""FlowProperties.
:ivar flow_id:
:vartype flow_id: str
:ivar experiment_id:
:vartype experiment_id: str
:ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType
:ivar flow_definition_file_path:
:vartype flow_definition_file_path: str
:ivar creation_context:
:vartype creation_context: ~flow.models.CreationContext
"""
_attribute_map = {
'flow_id': {'key': 'flowId', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'flow_type': {'key': 'flowType', 'type': 'str'},
'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
'creation_context': {'key': 'creationContext', 'type': 'CreationContext'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_id:
:paramtype flow_id: str
:keyword experiment_id:
:paramtype experiment_id: str
:keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:paramtype flow_type: str or ~flow.models.FlowType
:keyword flow_definition_file_path:
:paramtype flow_definition_file_path: str
:keyword creation_context:
:paramtype creation_context: ~flow.models.CreationContext
"""
super(FlowProperties, self).__init__(**kwargs)
self.flow_id = kwargs.get('flow_id', None)
self.experiment_id = kwargs.get('experiment_id', None)
self.flow_type = kwargs.get('flow_type', None)
self.flow_definition_file_path = kwargs.get('flow_definition_file_path', None)
self.creation_context = kwargs.get('creation_context', None)
class FlowRunBasePath(msrest.serialization.Model):
"""FlowRunBasePath.
:ivar output_datastore_name:
:vartype output_datastore_name: str
:ivar base_path:
:vartype base_path: str
"""
_attribute_map = {
'output_datastore_name': {'key': 'outputDatastoreName', 'type': 'str'},
'base_path': {'key': 'basePath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword output_datastore_name:
:paramtype output_datastore_name: str
:keyword base_path:
:paramtype base_path: str
"""
super(FlowRunBasePath, self).__init__(**kwargs)
self.output_datastore_name = kwargs.get('output_datastore_name', None)
self.base_path = kwargs.get('base_path', None)
class FlowRunInfo(msrest.serialization.Model):
"""FlowRunInfo.
:ivar flow_graph:
:vartype flow_graph: ~flow.models.FlowGraph
:ivar flow_graph_layout:
:vartype flow_graph_layout: ~flow.models.FlowGraphLayout
:ivar flow_name:
:vartype flow_name: str
:ivar flow_run_resource_id:
:vartype flow_run_resource_id: str
:ivar flow_run_id:
:vartype flow_run_id: str
:ivar flow_run_display_name:
:vartype flow_run_display_name: str
:ivar batch_inputs:
:vartype batch_inputs: list[dict[str, any]]
:ivar batch_data_input:
:vartype batch_data_input: ~flow.models.BatchDataInput
:ivar flow_run_type: Possible values include: "FlowRun", "EvaluationRun",
"PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun".
:vartype flow_run_type: str or ~flow.models.FlowRunTypeEnum
:ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType
:ivar runtime_name:
:vartype runtime_name: str
:ivar bulk_test_id:
:vartype bulk_test_id: str
:ivar created_by:
:vartype created_by: ~flow.models.SchemaContractsCreatedBy
:ivar created_on:
:vartype created_on: ~datetime.datetime
:ivar inputs_mapping: This is a dictionary.
:vartype inputs_mapping: dict[str, str]
:ivar output_datastore_name:
:vartype output_datastore_name: str
:ivar child_run_base_path:
:vartype child_run_base_path: str
:ivar working_directory:
:vartype working_directory: str
:ivar flow_dag_file_relative_path:
:vartype flow_dag_file_relative_path: str
:ivar flow_snapshot_id:
:vartype flow_snapshot_id: str
:ivar studio_portal_endpoint:
:vartype studio_portal_endpoint: str
"""
_attribute_map = {
'flow_graph': {'key': 'flowGraph', 'type': 'FlowGraph'},
'flow_graph_layout': {'key': 'flowGraphLayout', 'type': 'FlowGraphLayout'},
'flow_name': {'key': 'flowName', 'type': 'str'},
'flow_run_resource_id': {'key': 'flowRunResourceId', 'type': 'str'},
'flow_run_id': {'key': 'flowRunId', 'type': 'str'},
'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'},
'batch_inputs': {'key': 'batchInputs', 'type': '[{object}]'},
'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
'flow_run_type': {'key': 'flowRunType', 'type': 'str'},
'flow_type': {'key': 'flowType', 'type': 'str'},
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'bulk_test_id': {'key': 'bulkTestId', 'type': 'str'},
'created_by': {'key': 'createdBy', 'type': 'SchemaContractsCreatedBy'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'},
'output_datastore_name': {'key': 'outputDatastoreName', 'type': 'str'},
'child_run_base_path': {'key': 'childRunBasePath', 'type': 'str'},
'working_directory': {'key': 'workingDirectory', 'type': 'str'},
'flow_dag_file_relative_path': {'key': 'flowDagFileRelativePath', 'type': 'str'},
'flow_snapshot_id': {'key': 'flowSnapshotId', 'type': 'str'},
'studio_portal_endpoint': {'key': 'studioPortalEndpoint', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_graph:
:paramtype flow_graph: ~flow.models.FlowGraph
:keyword flow_graph_layout:
:paramtype flow_graph_layout: ~flow.models.FlowGraphLayout
:keyword flow_name:
:paramtype flow_name: str
:keyword flow_run_resource_id:
:paramtype flow_run_resource_id: str
:keyword flow_run_id:
:paramtype flow_run_id: str
:keyword flow_run_display_name:
:paramtype flow_run_display_name: str
:keyword batch_inputs:
:paramtype batch_inputs: list[dict[str, any]]
:keyword batch_data_input:
:paramtype batch_data_input: ~flow.models.BatchDataInput
:keyword flow_run_type: Possible values include: "FlowRun", "EvaluationRun",
"PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun".
:paramtype flow_run_type: str or ~flow.models.FlowRunTypeEnum
:keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:paramtype flow_type: str or ~flow.models.FlowType
:keyword runtime_name:
:paramtype runtime_name: str
:keyword bulk_test_id:
:paramtype bulk_test_id: str
:keyword created_by:
:paramtype created_by: ~flow.models.SchemaContractsCreatedBy
:keyword created_on:
:paramtype created_on: ~datetime.datetime
:keyword inputs_mapping: This is a dictionary.
:paramtype inputs_mapping: dict[str, str]
:keyword output_datastore_name:
:paramtype output_datastore_name: str
:keyword child_run_base_path:
:paramtype child_run_base_path: str
:keyword working_directory:
:paramtype working_directory: str
:keyword flow_dag_file_relative_path:
:paramtype flow_dag_file_relative_path: str
:keyword flow_snapshot_id:
:paramtype flow_snapshot_id: str
:keyword studio_portal_endpoint:
:paramtype studio_portal_endpoint: str
"""
super(FlowRunInfo, self).__init__(**kwargs)
self.flow_graph = kwargs.get('flow_graph', None)
self.flow_graph_layout = kwargs.get('flow_graph_layout', None)
self.flow_name = kwargs.get('flow_name', None)
self.flow_run_resource_id = kwargs.get('flow_run_resource_id', None)
self.flow_run_id = kwargs.get('flow_run_id', None)
self.flow_run_display_name = kwargs.get('flow_run_display_name', None)
self.batch_inputs = kwargs.get('batch_inputs', None)
self.batch_data_input = kwargs.get('batch_data_input', None)
self.flow_run_type = kwargs.get('flow_run_type', None)
self.flow_type = kwargs.get('flow_type', None)
self.runtime_name = kwargs.get('runtime_name', None)
self.bulk_test_id = kwargs.get('bulk_test_id', None)
self.created_by = kwargs.get('created_by', None)
self.created_on = kwargs.get('created_on', None)
self.inputs_mapping = kwargs.get('inputs_mapping', None)
self.output_datastore_name = kwargs.get('output_datastore_name', None)
self.child_run_base_path = kwargs.get('child_run_base_path', None)
self.working_directory = kwargs.get('working_directory', None)
self.flow_dag_file_relative_path = kwargs.get('flow_dag_file_relative_path', None)
self.flow_snapshot_id = kwargs.get('flow_snapshot_id', None)
self.studio_portal_endpoint = kwargs.get('studio_portal_endpoint', None)
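
# Illustrative round-trip (hypothetical payload): deserialize() maps the
# camelCase wire keys from _attribute_map back onto snake_case attributes and
# parses iso-8601 strings into datetimes:
#
#     info = FlowRunInfo.deserialize({
#         'flowRunId': 'run-123',
#         'flowRunType': 'FlowRun',
#         'createdOn': '2024-01-01T00:00:00.000Z',
#     })
#     info.flow_run_id  # -> 'run-123'
#     info.created_on   # -> datetime.datetime instance
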
class FlowRunResult(msrest.serialization.Model):
"""FlowRunResult.
:ivar flow_runs:
:vartype flow_runs: list[any]
:ivar node_runs:
:vartype node_runs: list[any]
:ivar error_response: The error response.
:vartype error_response: ~flow.models.ErrorResponse
:ivar flow_name:
:vartype flow_name: str
:ivar flow_run_display_name:
:vartype flow_run_display_name: str
:ivar flow_run_id:
:vartype flow_run_id: str
:ivar flow_graph:
:vartype flow_graph: ~flow.models.FlowGraph
:ivar flow_graph_layout:
:vartype flow_graph_layout: ~flow.models.FlowGraphLayout
:ivar flow_run_resource_id:
:vartype flow_run_resource_id: str
:ivar bulk_test_id:
:vartype bulk_test_id: str
:ivar batch_inputs:
:vartype batch_inputs: list[dict[str, any]]
:ivar batch_data_input:
:vartype batch_data_input: ~flow.models.BatchDataInput
:ivar created_by:
:vartype created_by: ~flow.models.SchemaContractsCreatedBy
:ivar created_on:
:vartype created_on: ~datetime.datetime
:ivar flow_run_type: Possible values include: "FlowRun", "EvaluationRun",
"PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun".
:vartype flow_run_type: str or ~flow.models.FlowRunTypeEnum
:ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType
:ivar runtime_name:
:vartype runtime_name: str
:ivar aml_compute_name:
:vartype aml_compute_name: str
:ivar flow_run_logs: Dictionary of :code:`<string>`.
:vartype flow_run_logs: dict[str, str]
:ivar flow_test_mode: Possible values include: "Sync", "Async".
:vartype flow_test_mode: str or ~flow.models.FlowTestMode
:ivar flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
:vartype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
:ivar working_directory:
:vartype working_directory: str
:ivar flow_dag_file_relative_path:
:vartype flow_dag_file_relative_path: str
:ivar flow_snapshot_id:
:vartype flow_snapshot_id: str
    :ivar variant_run_to_evaluation_runs_id_mapping: This is a dictionary mapping each variant
     flow run ID to the IDs of its evaluation runs.
:vartype variant_run_to_evaluation_runs_id_mapping: dict[str, list[str]]
"""
_attribute_map = {
'flow_runs': {'key': 'flow_runs', 'type': '[object]'},
'node_runs': {'key': 'node_runs', 'type': '[object]'},
'error_response': {'key': 'errorResponse', 'type': 'ErrorResponse'},
'flow_name': {'key': 'flowName', 'type': 'str'},
'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'},
'flow_run_id': {'key': 'flowRunId', 'type': 'str'},
'flow_graph': {'key': 'flowGraph', 'type': 'FlowGraph'},
'flow_graph_layout': {'key': 'flowGraphLayout', 'type': 'FlowGraphLayout'},
'flow_run_resource_id': {'key': 'flowRunResourceId', 'type': 'str'},
'bulk_test_id': {'key': 'bulkTestId', 'type': 'str'},
'batch_inputs': {'key': 'batchInputs', 'type': '[{object}]'},
'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
'created_by': {'key': 'createdBy', 'type': 'SchemaContractsCreatedBy'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'flow_run_type': {'key': 'flowRunType', 'type': 'str'},
'flow_type': {'key': 'flowType', 'type': 'str'},
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'},
'flow_run_logs': {'key': 'flowRunLogs', 'type': '{str}'},
'flow_test_mode': {'key': 'flowTestMode', 'type': 'str'},
'flow_test_infos': {'key': 'flowTestInfos', 'type': '{FlowTestInfo}'},
'working_directory': {'key': 'workingDirectory', 'type': 'str'},
'flow_dag_file_relative_path': {'key': 'flowDagFileRelativePath', 'type': 'str'},
'flow_snapshot_id': {'key': 'flowSnapshotId', 'type': 'str'},
'variant_run_to_evaluation_runs_id_mapping': {'key': 'variantRunToEvaluationRunsIdMapping', 'type': '{[str]}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_runs:
:paramtype flow_runs: list[any]
:keyword node_runs:
:paramtype node_runs: list[any]
:keyword error_response: The error response.
:paramtype error_response: ~flow.models.ErrorResponse
:keyword flow_name:
:paramtype flow_name: str
:keyword flow_run_display_name:
:paramtype flow_run_display_name: str
:keyword flow_run_id:
:paramtype flow_run_id: str
:keyword flow_graph:
:paramtype flow_graph: ~flow.models.FlowGraph
:keyword flow_graph_layout:
:paramtype flow_graph_layout: ~flow.models.FlowGraphLayout
:keyword flow_run_resource_id:
:paramtype flow_run_resource_id: str
:keyword bulk_test_id:
:paramtype bulk_test_id: str
:keyword batch_inputs:
:paramtype batch_inputs: list[dict[str, any]]
:keyword batch_data_input:
:paramtype batch_data_input: ~flow.models.BatchDataInput
:keyword created_by:
:paramtype created_by: ~flow.models.SchemaContractsCreatedBy
:keyword created_on:
:paramtype created_on: ~datetime.datetime
:keyword flow_run_type: Possible values include: "FlowRun", "EvaluationRun",
"PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun".
:paramtype flow_run_type: str or ~flow.models.FlowRunTypeEnum
:keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:paramtype flow_type: str or ~flow.models.FlowType
:keyword runtime_name:
:paramtype runtime_name: str
:keyword aml_compute_name:
:paramtype aml_compute_name: str
:keyword flow_run_logs: Dictionary of :code:`<string>`.
:paramtype flow_run_logs: dict[str, str]
:keyword flow_test_mode: Possible values include: "Sync", "Async".
:paramtype flow_test_mode: str or ~flow.models.FlowTestMode
:keyword flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
:paramtype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
:keyword working_directory:
:paramtype working_directory: str
:keyword flow_dag_file_relative_path:
:paramtype flow_dag_file_relative_path: str
:keyword flow_snapshot_id:
:paramtype flow_snapshot_id: str
    :keyword variant_run_to_evaluation_runs_id_mapping: This is a dictionary mapping each
     variant flow run ID to the IDs of its evaluation runs.
:paramtype variant_run_to_evaluation_runs_id_mapping: dict[str, list[str]]
"""
super(FlowRunResult, self).__init__(**kwargs)
self.flow_runs = kwargs.get('flow_runs', None)
self.node_runs = kwargs.get('node_runs', None)
self.error_response = kwargs.get('error_response', None)
self.flow_name = kwargs.get('flow_name', None)
self.flow_run_display_name = kwargs.get('flow_run_display_name', None)
self.flow_run_id = kwargs.get('flow_run_id', None)
self.flow_graph = kwargs.get('flow_graph', None)
self.flow_graph_layout = kwargs.get('flow_graph_layout', None)
self.flow_run_resource_id = kwargs.get('flow_run_resource_id', None)
self.bulk_test_id = kwargs.get('bulk_test_id', None)
self.batch_inputs = kwargs.get('batch_inputs', None)
self.batch_data_input = kwargs.get('batch_data_input', None)
self.created_by = kwargs.get('created_by', None)
self.created_on = kwargs.get('created_on', None)
self.flow_run_type = kwargs.get('flow_run_type', None)
self.flow_type = kwargs.get('flow_type', None)
self.runtime_name = kwargs.get('runtime_name', None)
self.aml_compute_name = kwargs.get('aml_compute_name', None)
self.flow_run_logs = kwargs.get('flow_run_logs', None)
self.flow_test_mode = kwargs.get('flow_test_mode', None)
self.flow_test_infos = kwargs.get('flow_test_infos', None)
self.working_directory = kwargs.get('working_directory', None)
self.flow_dag_file_relative_path = kwargs.get('flow_dag_file_relative_path', None)
self.flow_snapshot_id = kwargs.get('flow_snapshot_id', None)
self.variant_run_to_evaluation_runs_id_mapping = kwargs.get('variant_run_to_evaluation_runs_id_mapping', None)
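
# Illustrative shape (hypothetical IDs) for variant_run_to_evaluation_runs_id_mapping,
# a dict[str, list[str]] keyed by variant run ID:
#
#     result = FlowRunResult(
#         flow_run_id='variant_0_run',
#         variant_run_to_evaluation_runs_id_mapping={
#             'variant_0_run': ['eval_run_1', 'eval_run_2'],
#         },
#     )
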
class FlowRunSettings(msrest.serialization.Model):
"""FlowRunSettings.
:ivar run_mode: Possible values include: "Flow", "SingleNode", "FromNode", "BulkTest", "Eval",
"PairwiseEval", "ExperimentTest", "ExperimentEval".
:vartype run_mode: str or ~flow.models.FlowRunMode
:ivar tuning_node_names:
:vartype tuning_node_names: list[str]
:ivar tuning_node_settings: This is a dictionary.
:vartype tuning_node_settings: dict[str, ~flow.models.TuningNodeSetting]
:ivar baseline_variant_id:
:vartype baseline_variant_id: str
:ivar default_variant_id:
:vartype default_variant_id: str
:ivar variants: This is a dictionary.
:vartype variants: dict[str, list[~flow.models.Node]]
:ivar node_name:
:vartype node_name: str
:ivar is_default_variant:
:vartype is_default_variant: bool
:ivar node_variant_id:
:vartype node_variant_id: str
:ivar node_output_paths: Dictionary of :code:`<string>`.
:vartype node_output_paths: dict[str, str]
:ivar base_flow_run_id:
:vartype base_flow_run_id: str
:ivar flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
:vartype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
:ivar bulk_test_id:
:vartype bulk_test_id: str
:ivar evaluation_flow_run_settings: This is a dictionary.
:vartype evaluation_flow_run_settings: dict[str, ~flow.models.EvaluationFlowRunSettings]
:ivar bulk_test_flow_id:
:vartype bulk_test_flow_id: str
:ivar bulk_test_flow_run_ids:
:vartype bulk_test_flow_run_ids: list[str]
:ivar batch_inputs:
:vartype batch_inputs: list[dict[str, any]]
:ivar input_universal_link:
:vartype input_universal_link: str
:ivar data_inputs: This is a dictionary.
:vartype data_inputs: dict[str, str]
:ivar flow_run_output_directory:
:vartype flow_run_output_directory: str
:ivar connection_overrides:
:vartype connection_overrides: list[~flow.models.ConnectionOverrideSetting]
:ivar flow_run_display_name:
:vartype flow_run_display_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar runtime_name:
:vartype runtime_name: str
:ivar batch_data_input:
:vartype batch_data_input: ~flow.models.BatchDataInput
:ivar inputs_mapping: This is a dictionary.
:vartype inputs_mapping: dict[str, str]
:ivar connections: This is a dictionary.
:vartype connections: dict[str, dict[str, str]]
:ivar environment_variables: This is a dictionary.
:vartype environment_variables: dict[str, str]
:ivar output_data_store:
:vartype output_data_store: str
:ivar run_display_name_generation_type: Possible values include: "AutoAppend",
"UserProvidedMacro".
:vartype run_display_name_generation_type: str or ~flow.models.RunDisplayNameGenerationType
:ivar aml_compute_name:
:vartype aml_compute_name: str
:ivar worker_count:
:vartype worker_count: int
:ivar timeout_in_seconds:
:vartype timeout_in_seconds: int
:ivar promptflow_engine_type: Possible values include: "FastEngine", "ScalableEngine".
:vartype promptflow_engine_type: str or ~flow.models.PromptflowEngineType
"""
_attribute_map = {
'run_mode': {'key': 'runMode', 'type': 'str'},
'tuning_node_names': {'key': 'tuningNodeNames', 'type': '[str]'},
'tuning_node_settings': {'key': 'tuningNodeSettings', 'type': '{TuningNodeSetting}'},
'baseline_variant_id': {'key': 'baselineVariantId', 'type': 'str'},
'default_variant_id': {'key': 'defaultVariantId', 'type': 'str'},
'variants': {'key': 'variants', 'type': '{[Node]}'},
'node_name': {'key': 'nodeName', 'type': 'str'},
'is_default_variant': {'key': 'isDefaultVariant', 'type': 'bool'},
'node_variant_id': {'key': 'nodeVariantId', 'type': 'str'},
'node_output_paths': {'key': 'nodeOutputPaths', 'type': '{str}'},
'base_flow_run_id': {'key': 'baseFlowRunId', 'type': 'str'},
'flow_test_infos': {'key': 'flowTestInfos', 'type': '{FlowTestInfo}'},
'bulk_test_id': {'key': 'bulkTestId', 'type': 'str'},
'evaluation_flow_run_settings': {'key': 'evaluationFlowRunSettings', 'type': '{EvaluationFlowRunSettings}'},
'bulk_test_flow_id': {'key': 'bulkTestFlowId', 'type': 'str'},
'bulk_test_flow_run_ids': {'key': 'bulkTestFlowRunIds', 'type': '[str]'},
'batch_inputs': {'key': 'batch_inputs', 'type': '[{object}]'},
'input_universal_link': {'key': 'inputUniversalLink', 'type': 'str'},
'data_inputs': {'key': 'dataInputs', 'type': '{str}'},
'flow_run_output_directory': {'key': 'flowRunOutputDirectory', 'type': 'str'},
'connection_overrides': {'key': 'connectionOverrides', 'type': '[ConnectionOverrideSetting]'},
'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'},
'connections': {'key': 'connections', 'type': '{{str}}'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'output_data_store': {'key': 'outputDataStore', 'type': 'str'},
'run_display_name_generation_type': {'key': 'runDisplayNameGenerationType', 'type': 'str'},
'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'},
'worker_count': {'key': 'workerCount', 'type': 'int'},
'timeout_in_seconds': {'key': 'timeoutInSeconds', 'type': 'int'},
'promptflow_engine_type': {'key': 'promptflowEngineType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword run_mode: Possible values include: "Flow", "SingleNode", "FromNode", "BulkTest",
"Eval", "PairwiseEval", "ExperimentTest", "ExperimentEval".
:paramtype run_mode: str or ~flow.models.FlowRunMode
:keyword tuning_node_names:
:paramtype tuning_node_names: list[str]
:keyword tuning_node_settings: This is a dictionary.
:paramtype tuning_node_settings: dict[str, ~flow.models.TuningNodeSetting]
:keyword baseline_variant_id:
:paramtype baseline_variant_id: str
:keyword default_variant_id:
:paramtype default_variant_id: str
:keyword variants: This is a dictionary.
:paramtype variants: dict[str, list[~flow.models.Node]]
:keyword node_name:
:paramtype node_name: str
:keyword is_default_variant:
:paramtype is_default_variant: bool
:keyword node_variant_id:
:paramtype node_variant_id: str
:keyword node_output_paths: Dictionary of :code:`<string>`.
:paramtype node_output_paths: dict[str, str]
:keyword base_flow_run_id:
:paramtype base_flow_run_id: str
:keyword flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
:paramtype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
:keyword bulk_test_id:
:paramtype bulk_test_id: str
:keyword evaluation_flow_run_settings: This is a dictionary.
:paramtype evaluation_flow_run_settings: dict[str, ~flow.models.EvaluationFlowRunSettings]
:keyword bulk_test_flow_id:
:paramtype bulk_test_flow_id: str
:keyword bulk_test_flow_run_ids:
:paramtype bulk_test_flow_run_ids: list[str]
:keyword batch_inputs:
:paramtype batch_inputs: list[dict[str, any]]
:keyword input_universal_link:
:paramtype input_universal_link: str
:keyword data_inputs: This is a dictionary.
:paramtype data_inputs: dict[str, str]
:keyword flow_run_output_directory:
:paramtype flow_run_output_directory: str
:keyword connection_overrides:
:paramtype connection_overrides: list[~flow.models.ConnectionOverrideSetting]
:keyword flow_run_display_name:
:paramtype flow_run_display_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword runtime_name:
:paramtype runtime_name: str
:keyword batch_data_input:
:paramtype batch_data_input: ~flow.models.BatchDataInput
:keyword inputs_mapping: This is a dictionary.
:paramtype inputs_mapping: dict[str, str]
:keyword connections: This is a dictionary.
:paramtype connections: dict[str, dict[str, str]]
:keyword environment_variables: This is a dictionary.
:paramtype environment_variables: dict[str, str]
:keyword output_data_store:
:paramtype output_data_store: str
:keyword run_display_name_generation_type: Possible values include: "AutoAppend",
"UserProvidedMacro".
:paramtype run_display_name_generation_type: str or ~flow.models.RunDisplayNameGenerationType
:keyword aml_compute_name:
:paramtype aml_compute_name: str
:keyword worker_count:
:paramtype worker_count: int
:keyword timeout_in_seconds:
:paramtype timeout_in_seconds: int
:keyword promptflow_engine_type: Possible values include: "FastEngine", "ScalableEngine".
:paramtype promptflow_engine_type: str or ~flow.models.PromptflowEngineType
"""
super(FlowRunSettings, self).__init__(**kwargs)
self.run_mode = kwargs.get('run_mode', None)
self.tuning_node_names = kwargs.get('tuning_node_names', None)
self.tuning_node_settings = kwargs.get('tuning_node_settings', None)
self.baseline_variant_id = kwargs.get('baseline_variant_id', None)
self.default_variant_id = kwargs.get('default_variant_id', None)
self.variants = kwargs.get('variants', None)
self.node_name = kwargs.get('node_name', None)
self.is_default_variant = kwargs.get('is_default_variant', None)
self.node_variant_id = kwargs.get('node_variant_id', None)
self.node_output_paths = kwargs.get('node_output_paths', None)
self.base_flow_run_id = kwargs.get('base_flow_run_id', None)
self.flow_test_infos = kwargs.get('flow_test_infos', None)
self.bulk_test_id = kwargs.get('bulk_test_id', None)
self.evaluation_flow_run_settings = kwargs.get('evaluation_flow_run_settings', None)
self.bulk_test_flow_id = kwargs.get('bulk_test_flow_id', None)
self.bulk_test_flow_run_ids = kwargs.get('bulk_test_flow_run_ids', None)
self.batch_inputs = kwargs.get('batch_inputs', None)
self.input_universal_link = kwargs.get('input_universal_link', None)
self.data_inputs = kwargs.get('data_inputs', None)
self.flow_run_output_directory = kwargs.get('flow_run_output_directory', None)
self.connection_overrides = kwargs.get('connection_overrides', None)
self.flow_run_display_name = kwargs.get('flow_run_display_name', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
self.runtime_name = kwargs.get('runtime_name', None)
self.batch_data_input = kwargs.get('batch_data_input', None)
self.inputs_mapping = kwargs.get('inputs_mapping', None)
self.connections = kwargs.get('connections', None)
self.environment_variables = kwargs.get('environment_variables', None)
self.output_data_store = kwargs.get('output_data_store', None)
self.run_display_name_generation_type = kwargs.get('run_display_name_generation_type', None)
self.aml_compute_name = kwargs.get('aml_compute_name', None)
self.worker_count = kwargs.get('worker_count', None)
self.timeout_in_seconds = kwargs.get('timeout_in_seconds', None)
self.promptflow_engine_type = kwargs.get('promptflow_engine_type', None)
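
# Illustrative usage (hypothetical values, including the '${data.*}' mapping
# syntax): minimal settings for a bulk test that feeds a data column into the
# flow's 'question' input:
#
#     settings = FlowRunSettings(
#         run_mode='BulkTest',
#         data_inputs={'data': 'azureml://datastores/workspaceblobstore/paths/data.jsonl'},
#         inputs_mapping={'question': '${data.question}'},
#         runtime_name='my-runtime',
#     )
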
class FlowRunSettingsBase(msrest.serialization.Model):
"""FlowRunSettingsBase.
:ivar batch_inputs:
:vartype batch_inputs: list[dict[str, any]]
:ivar input_universal_link:
:vartype input_universal_link: str
:ivar data_inputs: This is a dictionary.
:vartype data_inputs: dict[str, str]
:ivar flow_run_output_directory:
:vartype flow_run_output_directory: str
:ivar connection_overrides:
:vartype connection_overrides: list[~flow.models.ConnectionOverrideSetting]
:ivar flow_run_display_name:
:vartype flow_run_display_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar runtime_name:
:vartype runtime_name: str
:ivar batch_data_input:
:vartype batch_data_input: ~flow.models.BatchDataInput
:ivar inputs_mapping: This is a dictionary.
:vartype inputs_mapping: dict[str, str]
:ivar connections: This is a dictionary.
:vartype connections: dict[str, dict[str, str]]
:ivar environment_variables: This is a dictionary.
:vartype environment_variables: dict[str, str]
:ivar output_data_store:
:vartype output_data_store: str
:ivar run_display_name_generation_type: Possible values include: "AutoAppend",
"UserProvidedMacro".
:vartype run_display_name_generation_type: str or ~flow.models.RunDisplayNameGenerationType
:ivar aml_compute_name:
:vartype aml_compute_name: str
:ivar worker_count:
:vartype worker_count: int
:ivar timeout_in_seconds:
:vartype timeout_in_seconds: int
:ivar promptflow_engine_type: Possible values include: "FastEngine", "ScalableEngine".
:vartype promptflow_engine_type: str or ~flow.models.PromptflowEngineType
"""
_attribute_map = {
'batch_inputs': {'key': 'batch_inputs', 'type': '[{object}]'},
'input_universal_link': {'key': 'inputUniversalLink', 'type': 'str'},
'data_inputs': {'key': 'dataInputs', 'type': '{str}'},
'flow_run_output_directory': {'key': 'flowRunOutputDirectory', 'type': 'str'},
'connection_overrides': {'key': 'connectionOverrides', 'type': '[ConnectionOverrideSetting]'},
'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'},
'connections': {'key': 'connections', 'type': '{{str}}'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'output_data_store': {'key': 'outputDataStore', 'type': 'str'},
'run_display_name_generation_type': {'key': 'runDisplayNameGenerationType', 'type': 'str'},
'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'},
'worker_count': {'key': 'workerCount', 'type': 'int'},
'timeout_in_seconds': {'key': 'timeoutInSeconds', 'type': 'int'},
'promptflow_engine_type': {'key': 'promptflowEngineType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword batch_inputs:
:paramtype batch_inputs: list[dict[str, any]]
:keyword input_universal_link:
:paramtype input_universal_link: str
:keyword data_inputs: This is a dictionary.
:paramtype data_inputs: dict[str, str]
:keyword flow_run_output_directory:
:paramtype flow_run_output_directory: str
:keyword connection_overrides:
:paramtype connection_overrides: list[~flow.models.ConnectionOverrideSetting]
:keyword flow_run_display_name:
:paramtype flow_run_display_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword runtime_name:
:paramtype runtime_name: str
:keyword batch_data_input:
:paramtype batch_data_input: ~flow.models.BatchDataInput
:keyword inputs_mapping: This is a dictionary.
:paramtype inputs_mapping: dict[str, str]
:keyword connections: This is a dictionary.
:paramtype connections: dict[str, dict[str, str]]
:keyword environment_variables: This is a dictionary.
:paramtype environment_variables: dict[str, str]
:keyword output_data_store:
:paramtype output_data_store: str
:keyword run_display_name_generation_type: Possible values include: "AutoAppend",
"UserProvidedMacro".
:paramtype run_display_name_generation_type: str or ~flow.models.RunDisplayNameGenerationType
:keyword aml_compute_name:
:paramtype aml_compute_name: str
:keyword worker_count:
:paramtype worker_count: int
:keyword timeout_in_seconds:
:paramtype timeout_in_seconds: int
:keyword promptflow_engine_type: Possible values include: "FastEngine", "ScalableEngine".
:paramtype promptflow_engine_type: str or ~flow.models.PromptflowEngineType
"""
super(FlowRunSettingsBase, self).__init__(**kwargs)
self.batch_inputs = kwargs.get('batch_inputs', None)
self.input_universal_link = kwargs.get('input_universal_link', None)
self.data_inputs = kwargs.get('data_inputs', None)
self.flow_run_output_directory = kwargs.get('flow_run_output_directory', None)
self.connection_overrides = kwargs.get('connection_overrides', None)
self.flow_run_display_name = kwargs.get('flow_run_display_name', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
self.runtime_name = kwargs.get('runtime_name', None)
self.batch_data_input = kwargs.get('batch_data_input', None)
self.inputs_mapping = kwargs.get('inputs_mapping', None)
self.connections = kwargs.get('connections', None)
self.environment_variables = kwargs.get('environment_variables', None)
self.output_data_store = kwargs.get('output_data_store', None)
self.run_display_name_generation_type = kwargs.get('run_display_name_generation_type', None)
self.aml_compute_name = kwargs.get('aml_compute_name', None)
self.worker_count = kwargs.get('worker_count', None)
self.timeout_in_seconds = kwargs.get('timeout_in_seconds', None)
self.promptflow_engine_type = kwargs.get('promptflow_engine_type', None)
class FlowRunStatusResponse(msrest.serialization.Model):
"""FlowRunStatusResponse.
:ivar flow_run_status: Possible values include: "Started", "Completed", "Failed", "Cancelled",
"NotStarted", "Running", "Queued", "Paused", "Unapproved", "Starting", "Preparing",
"CancelRequested", "Pausing", "Finalizing", "Canceled", "Bypassed".
:vartype flow_run_status: str or ~flow.models.FlowRunStatusEnum
:ivar last_checked_time:
:vartype last_checked_time: ~datetime.datetime
:ivar flow_run_created_time:
:vartype flow_run_created_time: ~datetime.datetime
"""
_attribute_map = {
'flow_run_status': {'key': 'flowRunStatus', 'type': 'str'},
'last_checked_time': {'key': 'lastCheckedTime', 'type': 'iso-8601'},
'flow_run_created_time': {'key': 'flowRunCreatedTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_run_status: Possible values include: "Started", "Completed", "Failed",
"Cancelled", "NotStarted", "Running", "Queued", "Paused", "Unapproved", "Starting",
"Preparing", "CancelRequested", "Pausing", "Finalizing", "Canceled", "Bypassed".
:paramtype flow_run_status: str or ~flow.models.FlowRunStatusEnum
:keyword last_checked_time:
:paramtype last_checked_time: ~datetime.datetime
:keyword flow_run_created_time:
:paramtype flow_run_created_time: ~datetime.datetime
"""
super(FlowRunStatusResponse, self).__init__(**kwargs)
self.flow_run_status = kwargs.get('flow_run_status', None)
self.last_checked_time = kwargs.get('last_checked_time', None)
self.flow_run_created_time = kwargs.get('flow_run_created_time', None)
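
# Illustrative polling check (hypothetical payload); which statuses count as
# terminal is an assumption here:
#
#     status = FlowRunStatusResponse.deserialize(
#         {'flowRunStatus': 'Running', 'lastCheckedTime': '2024-01-01T00:00:00.000Z'})
#     is_terminal = status.flow_run_status in (
#         'Completed', 'Failed', 'Cancelled', 'Canceled', 'Bypassed')
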
class FlowRuntimeCapability(msrest.serialization.Model):
"""FlowRuntimeCapability.
:ivar flow_features:
:vartype flow_features: list[~flow.models.FlowFeature]
"""
_attribute_map = {
'flow_features': {'key': 'flowFeatures', 'type': '[FlowFeature]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_features:
:paramtype flow_features: list[~flow.models.FlowFeature]
"""
super(FlowRuntimeCapability, self).__init__(**kwargs)
self.flow_features = kwargs.get('flow_features', None)
class FlowRuntimeDto(msrest.serialization.Model):
"""FlowRuntimeDto.
:ivar runtime_name:
:vartype runtime_name: str
:ivar runtime_description:
:vartype runtime_description: str
:ivar runtime_type: Possible values include: "ManagedOnlineEndpoint", "ComputeInstance",
"TrainingSession".
:vartype runtime_type: str or ~flow.models.RuntimeType
:ivar environment:
:vartype environment: str
:ivar status: Possible values include: "Unavailable", "Failed", "NotExist", "Starting",
"Stopping".
:vartype status: str or ~flow.models.RuntimeStatusEnum
:ivar status_message:
:vartype status_message: str
:ivar error: The error response.
:vartype error: ~flow.models.ErrorResponse
:ivar from_existing_endpoint:
:vartype from_existing_endpoint: bool
:ivar endpoint_name:
:vartype endpoint_name: str
:ivar from_existing_deployment:
:vartype from_existing_deployment: bool
:ivar deployment_name:
:vartype deployment_name: str
:ivar identity:
:vartype identity: ~flow.models.ManagedServiceIdentity
:ivar instance_type:
:vartype instance_type: str
:ivar instance_count:
:vartype instance_count: int
:ivar compute_instance_name:
:vartype compute_instance_name: str
:ivar docker_image:
:vartype docker_image: str
:ivar published_port:
:vartype published_port: int
:ivar target_port:
:vartype target_port: int
:ivar from_existing_custom_app:
:vartype from_existing_custom_app: bool
:ivar custom_app_name:
:vartype custom_app_name: str
:ivar assigned_to:
:vartype assigned_to: ~flow.models.AssignedUser
:ivar endpoint_url:
:vartype endpoint_url: str
:ivar created_on:
:vartype created_on: ~datetime.datetime
:ivar modified_on:
:vartype modified_on: ~datetime.datetime
:ivar owner:
:vartype owner: ~flow.models.SchemaContractsCreatedBy
"""
_attribute_map = {
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'runtime_description': {'key': 'runtimeDescription', 'type': 'str'},
'runtime_type': {'key': 'runtimeType', 'type': 'str'},
'environment': {'key': 'environment', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'status_message': {'key': 'statusMessage', 'type': 'str'},
'error': {'key': 'error', 'type': 'ErrorResponse'},
'from_existing_endpoint': {'key': 'fromExistingEndpoint', 'type': 'bool'},
'endpoint_name': {'key': 'endpointName', 'type': 'str'},
'from_existing_deployment': {'key': 'fromExistingDeployment', 'type': 'bool'},
'deployment_name': {'key': 'deploymentName', 'type': 'str'},
'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'},
'instance_type': {'key': 'instanceType', 'type': 'str'},
'instance_count': {'key': 'instanceCount', 'type': 'int'},
'compute_instance_name': {'key': 'computeInstanceName', 'type': 'str'},
'docker_image': {'key': 'dockerImage', 'type': 'str'},
'published_port': {'key': 'publishedPort', 'type': 'int'},
'target_port': {'key': 'targetPort', 'type': 'int'},
'from_existing_custom_app': {'key': 'fromExistingCustomApp', 'type': 'bool'},
'custom_app_name': {'key': 'customAppName', 'type': 'str'},
'assigned_to': {'key': 'assignedTo', 'type': 'AssignedUser'},
'endpoint_url': {'key': 'endpointUrl', 'type': 'str'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'},
}
def __init__(
self,
**kwargs
):
"""
:keyword runtime_name:
:paramtype runtime_name: str
:keyword runtime_description:
:paramtype runtime_description: str
:keyword runtime_type: Possible values include: "ManagedOnlineEndpoint", "ComputeInstance",
"TrainingSession".
:paramtype runtime_type: str or ~flow.models.RuntimeType
:keyword environment:
:paramtype environment: str
:keyword status: Possible values include: "Unavailable", "Failed", "NotExist", "Starting",
"Stopping".
:paramtype status: str or ~flow.models.RuntimeStatusEnum
:keyword status_message:
:paramtype status_message: str
:keyword error: The error response.
:paramtype error: ~flow.models.ErrorResponse
:keyword from_existing_endpoint:
:paramtype from_existing_endpoint: bool
:keyword endpoint_name:
:paramtype endpoint_name: str
:keyword from_existing_deployment:
:paramtype from_existing_deployment: bool
:keyword deployment_name:
:paramtype deployment_name: str
:keyword identity:
:paramtype identity: ~flow.models.ManagedServiceIdentity
:keyword instance_type:
:paramtype instance_type: str
:keyword instance_count:
:paramtype instance_count: int
:keyword compute_instance_name:
:paramtype compute_instance_name: str
:keyword docker_image:
:paramtype docker_image: str
:keyword published_port:
:paramtype published_port: int
:keyword target_port:
:paramtype target_port: int
:keyword from_existing_custom_app:
:paramtype from_existing_custom_app: bool
:keyword custom_app_name:
:paramtype custom_app_name: str
:keyword assigned_to:
:paramtype assigned_to: ~flow.models.AssignedUser
:keyword endpoint_url:
:paramtype endpoint_url: str
:keyword created_on:
:paramtype created_on: ~datetime.datetime
:keyword modified_on:
:paramtype modified_on: ~datetime.datetime
:keyword owner:
:paramtype owner: ~flow.models.SchemaContractsCreatedBy
"""
super(FlowRuntimeDto, self).__init__(**kwargs)
self.runtime_name = kwargs.get('runtime_name', None)
self.runtime_description = kwargs.get('runtime_description', None)
self.runtime_type = kwargs.get('runtime_type', None)
self.environment = kwargs.get('environment', None)
self.status = kwargs.get('status', None)
self.status_message = kwargs.get('status_message', None)
self.error = kwargs.get('error', None)
self.from_existing_endpoint = kwargs.get('from_existing_endpoint', None)
self.endpoint_name = kwargs.get('endpoint_name', None)
self.from_existing_deployment = kwargs.get('from_existing_deployment', None)
self.deployment_name = kwargs.get('deployment_name', None)
self.identity = kwargs.get('identity', None)
self.instance_type = kwargs.get('instance_type', None)
self.instance_count = kwargs.get('instance_count', None)
self.compute_instance_name = kwargs.get('compute_instance_name', None)
self.docker_image = kwargs.get('docker_image', None)
self.published_port = kwargs.get('published_port', None)
self.target_port = kwargs.get('target_port', None)
self.from_existing_custom_app = kwargs.get('from_existing_custom_app', None)
self.custom_app_name = kwargs.get('custom_app_name', None)
self.assigned_to = kwargs.get('assigned_to', None)
self.endpoint_url = kwargs.get('endpoint_url', None)
self.created_on = kwargs.get('created_on', None)
self.modified_on = kwargs.get('modified_on', None)
self.owner = kwargs.get('owner', None)
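
# --- Illustrative usage sketch (hand-written; not AutoRest output) ---
# A FlowRuntimeDto is normally hydrated from a service response rather than
# built by hand. The payload below is a hypothetical minimal example of that
# wire format, used to show the camelCase-to-snake_case mapping msrest applies.
def _example_flow_runtime_dto_roundtrip():
    payload = {
        "runtimeName": "my-runtime",        # hypothetical runtime name
        "runtimeType": "ComputeInstance",   # one of the documented enum values
        "status": "NotExist",
        "instanceCount": 1,
    }
    runtime = FlowRuntimeDto.deserialize(payload)
    assert runtime.runtime_name == "my-runtime"
    assert runtime.instance_count == 1
    # serialize() maps the snake_case attributes back to the wire keys declared
    # in _attribute_map; attributes left as None are omitted from the output.
    assert runtime.serialize()["runtimeType"] == "ComputeInstance"
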
class FlowSampleDto(msrest.serialization.Model):
"""FlowSampleDto.
:ivar sample_resource_id:
:vartype sample_resource_id: str
:ivar section: Possible values include: "Gallery", "Template".
:vartype section: str or ~flow.models.Section
:ivar index_number:
:vartype index_number: int
:ivar flow_name:
:vartype flow_name: str
:ivar description:
:vartype description: str
:ivar details:
:vartype details: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar flow:
:vartype flow: ~flow.models.Flow
:ivar flow_definition_file_path:
:vartype flow_definition_file_path: str
:ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType
:ivar flow_run_settings:
:vartype flow_run_settings: ~flow.models.FlowRunSettings
:ivar is_archived:
:vartype is_archived: bool
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar identity:
:vartype identity: str
"""
_attribute_map = {
'sample_resource_id': {'key': 'sampleResourceId', 'type': 'str'},
'section': {'key': 'section', 'type': 'str'},
'index_number': {'key': 'indexNumber', 'type': 'int'},
'flow_name': {'key': 'flowName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'details': {'key': 'details', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'flow': {'key': 'flow', 'type': 'Flow'},
'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
'flow_type': {'key': 'flowType', 'type': 'str'},
'flow_run_settings': {'key': 'flowRunSettings', 'type': 'FlowRunSettings'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'identity': {'key': 'identity', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword sample_resource_id:
:paramtype sample_resource_id: str
:keyword section: Possible values include: "Gallery", "Template".
:paramtype section: str or ~flow.models.Section
:keyword index_number:
:paramtype index_number: int
:keyword flow_name:
:paramtype flow_name: str
:keyword description:
:paramtype description: str
:keyword details:
:paramtype details: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword flow:
:paramtype flow: ~flow.models.Flow
:keyword flow_definition_file_path:
:paramtype flow_definition_file_path: str
:keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:paramtype flow_type: str or ~flow.models.FlowType
:keyword flow_run_settings:
:paramtype flow_run_settings: ~flow.models.FlowRunSettings
:keyword is_archived:
:paramtype is_archived: bool
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword identity:
:paramtype identity: str
"""
super(FlowSampleDto, self).__init__(**kwargs)
self.sample_resource_id = kwargs.get('sample_resource_id', None)
self.section = kwargs.get('section', None)
self.index_number = kwargs.get('index_number', None)
self.flow_name = kwargs.get('flow_name', None)
self.description = kwargs.get('description', None)
self.details = kwargs.get('details', None)
self.tags = kwargs.get('tags', None)
self.flow = kwargs.get('flow', None)
self.flow_definition_file_path = kwargs.get('flow_definition_file_path', None)
self.flow_type = kwargs.get('flow_type', None)
self.flow_run_settings = kwargs.get('flow_run_settings', None)
self.is_archived = kwargs.get('is_archived', None)
self.vm_size = kwargs.get('vm_size', None)
self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
self.identity = kwargs.get('identity', None)
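
# Illustrative sketch (hand-written; not AutoRest output): gallery samples are
# read-mostly objects, but the model can also be constructed directly. All
# values below are made up.
def _example_flow_sample_dto():
    sample = FlowSampleDto(
        flow_name="web-classification",     # hypothetical sample name
        section="Gallery",                  # documented enum value
        flow_type="Default",                # documented enum value
        tags={"category": "demo"},
    )
    wire = sample.serialize()
    # Wire keys follow _attribute_map, e.g. flow_name -> flowName.
    assert wire["flowName"] == "web-classification"
    assert wire["section"] == "Gallery"
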
class FlowSessionDto(msrest.serialization.Model):
"""FlowSessionDto.
:ivar session_id:
:vartype session_id: str
:ivar base_image:
:vartype base_image: str
:ivar packages:
:vartype packages: list[str]
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar compute_name:
:vartype compute_name: str
:ivar flow_features:
:vartype flow_features: list[~flow.models.FlowFeature]
:ivar runtime_name:
:vartype runtime_name: str
:ivar runtime_description:
:vartype runtime_description: str
:ivar runtime_type: Possible values include: "ManagedOnlineEndpoint", "ComputeInstance",
"TrainingSession".
:vartype runtime_type: str or ~flow.models.RuntimeType
:ivar environment:
:vartype environment: str
:ivar status: Possible values include: "Unavailable", "Failed", "NotExist", "Starting",
"Stopping".
:vartype status: str or ~flow.models.RuntimeStatusEnum
:ivar status_message:
:vartype status_message: str
:ivar error: The error response.
:vartype error: ~flow.models.ErrorResponse
:ivar from_existing_endpoint:
:vartype from_existing_endpoint: bool
:ivar endpoint_name:
:vartype endpoint_name: str
:ivar from_existing_deployment:
:vartype from_existing_deployment: bool
:ivar deployment_name:
:vartype deployment_name: str
:ivar identity:
:vartype identity: ~flow.models.ManagedServiceIdentity
:ivar instance_type:
:vartype instance_type: str
:ivar instance_count:
:vartype instance_count: int
:ivar compute_instance_name:
:vartype compute_instance_name: str
:ivar docker_image:
:vartype docker_image: str
:ivar published_port:
:vartype published_port: int
:ivar target_port:
:vartype target_port: int
:ivar from_existing_custom_app:
:vartype from_existing_custom_app: bool
:ivar custom_app_name:
:vartype custom_app_name: str
:ivar assigned_to:
:vartype assigned_to: ~flow.models.AssignedUser
:ivar endpoint_url:
:vartype endpoint_url: str
:ivar created_on:
:vartype created_on: ~datetime.datetime
:ivar modified_on:
:vartype modified_on: ~datetime.datetime
:ivar owner:
:vartype owner: ~flow.models.SchemaContractsCreatedBy
"""
_attribute_map = {
'session_id': {'key': 'sessionId', 'type': 'str'},
'base_image': {'key': 'baseImage', 'type': 'str'},
'packages': {'key': 'packages', 'type': '[str]'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'compute_name': {'key': 'computeName', 'type': 'str'},
'flow_features': {'key': 'flowFeatures', 'type': '[FlowFeature]'},
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'runtime_description': {'key': 'runtimeDescription', 'type': 'str'},
'runtime_type': {'key': 'runtimeType', 'type': 'str'},
'environment': {'key': 'environment', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'status_message': {'key': 'statusMessage', 'type': 'str'},
'error': {'key': 'error', 'type': 'ErrorResponse'},
'from_existing_endpoint': {'key': 'fromExistingEndpoint', 'type': 'bool'},
'endpoint_name': {'key': 'endpointName', 'type': 'str'},
'from_existing_deployment': {'key': 'fromExistingDeployment', 'type': 'bool'},
'deployment_name': {'key': 'deploymentName', 'type': 'str'},
'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'},
'instance_type': {'key': 'instanceType', 'type': 'str'},
'instance_count': {'key': 'instanceCount', 'type': 'int'},
'compute_instance_name': {'key': 'computeInstanceName', 'type': 'str'},
'docker_image': {'key': 'dockerImage', 'type': 'str'},
'published_port': {'key': 'publishedPort', 'type': 'int'},
'target_port': {'key': 'targetPort', 'type': 'int'},
'from_existing_custom_app': {'key': 'fromExistingCustomApp', 'type': 'bool'},
'custom_app_name': {'key': 'customAppName', 'type': 'str'},
'assigned_to': {'key': 'assignedTo', 'type': 'AssignedUser'},
'endpoint_url': {'key': 'endpointUrl', 'type': 'str'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'},
}
def __init__(
self,
**kwargs
):
"""
:keyword session_id:
:paramtype session_id: str
:keyword base_image:
:paramtype base_image: str
:keyword packages:
:paramtype packages: list[str]
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword compute_name:
:paramtype compute_name: str
:keyword flow_features:
:paramtype flow_features: list[~flow.models.FlowFeature]
:keyword runtime_name:
:paramtype runtime_name: str
:keyword runtime_description:
:paramtype runtime_description: str
:keyword runtime_type: Possible values include: "ManagedOnlineEndpoint", "ComputeInstance",
"TrainingSession".
:paramtype runtime_type: str or ~flow.models.RuntimeType
:keyword environment:
:paramtype environment: str
:keyword status: Possible values include: "Unavailable", "Failed", "NotExist", "Starting",
"Stopping".
:paramtype status: str or ~flow.models.RuntimeStatusEnum
:keyword status_message:
:paramtype status_message: str
:keyword error: The error response.
:paramtype error: ~flow.models.ErrorResponse
:keyword from_existing_endpoint:
:paramtype from_existing_endpoint: bool
:keyword endpoint_name:
:paramtype endpoint_name: str
:keyword from_existing_deployment:
:paramtype from_existing_deployment: bool
:keyword deployment_name:
:paramtype deployment_name: str
:keyword identity:
:paramtype identity: ~flow.models.ManagedServiceIdentity
:keyword instance_type:
:paramtype instance_type: str
:keyword instance_count:
:paramtype instance_count: int
:keyword compute_instance_name:
:paramtype compute_instance_name: str
:keyword docker_image:
:paramtype docker_image: str
:keyword published_port:
:paramtype published_port: int
:keyword target_port:
:paramtype target_port: int
:keyword from_existing_custom_app:
:paramtype from_existing_custom_app: bool
:keyword custom_app_name:
:paramtype custom_app_name: str
:keyword assigned_to:
:paramtype assigned_to: ~flow.models.AssignedUser
:keyword endpoint_url:
:paramtype endpoint_url: str
:keyword created_on:
:paramtype created_on: ~datetime.datetime
:keyword modified_on:
:paramtype modified_on: ~datetime.datetime
:keyword owner:
:paramtype owner: ~flow.models.SchemaContractsCreatedBy
"""
super(FlowSessionDto, self).__init__(**kwargs)
self.session_id = kwargs.get('session_id', None)
self.base_image = kwargs.get('base_image', None)
self.packages = kwargs.get('packages', None)
self.vm_size = kwargs.get('vm_size', None)
self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
self.compute_name = kwargs.get('compute_name', None)
self.flow_features = kwargs.get('flow_features', None)
self.runtime_name = kwargs.get('runtime_name', None)
self.runtime_description = kwargs.get('runtime_description', None)
self.runtime_type = kwargs.get('runtime_type', None)
self.environment = kwargs.get('environment', None)
self.status = kwargs.get('status', None)
self.status_message = kwargs.get('status_message', None)
self.error = kwargs.get('error', None)
self.from_existing_endpoint = kwargs.get('from_existing_endpoint', None)
self.endpoint_name = kwargs.get('endpoint_name', None)
self.from_existing_deployment = kwargs.get('from_existing_deployment', None)
self.deployment_name = kwargs.get('deployment_name', None)
self.identity = kwargs.get('identity', None)
self.instance_type = kwargs.get('instance_type', None)
self.instance_count = kwargs.get('instance_count', None)
self.compute_instance_name = kwargs.get('compute_instance_name', None)
self.docker_image = kwargs.get('docker_image', None)
self.published_port = kwargs.get('published_port', None)
self.target_port = kwargs.get('target_port', None)
self.from_existing_custom_app = kwargs.get('from_existing_custom_app', None)
self.custom_app_name = kwargs.get('custom_app_name', None)
self.assigned_to = kwargs.get('assigned_to', None)
self.endpoint_url = kwargs.get('endpoint_url', None)
self.created_on = kwargs.get('created_on', None)
self.modified_on = kwargs.get('modified_on', None)
self.owner = kwargs.get('owner', None)
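
# Illustrative sketch (hand-written; not AutoRest output): FlowSessionDto
# carries both session settings (image, packages, VM size) and the same
# status/runtime fields as FlowRuntimeDto. Values below are hypothetical.
def _example_flow_session_dto():
    session = FlowSessionDto(
        session_id="session-123",
        base_image="python:3.9-slim",       # hypothetical base image
        packages=["numpy", "pandas"],
        vm_size="Standard_DS2_v2",
        max_idle_time_seconds=3600,
    )
    wire = session.serialize()
    assert wire["sessionId"] == "session-123"
    assert wire["maxIdleTimeSeconds"] == 3600
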
class FlowSnapshot(msrest.serialization.Model):
"""FlowSnapshot.
:ivar inputs: This is a dictionary.
:vartype inputs: dict[str, ~flow.models.FlowInputDefinition]
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, ~flow.models.FlowOutputDefinition]
:ivar nodes:
:vartype nodes: list[~flow.models.FlowNode]
:ivar node_variants: This is a dictionary.
:vartype node_variants: dict[str, ~flow.models.FlowNodeVariant]
:ivar environment:
:vartype environment: ~flow.models.FlowEnvironment
:ivar environment_variables: This is a dictionary.
:vartype environment_variables: dict[str, any]
:ivar language: Possible values include: "Python", "CSharp", "TypeScript", "JavaScript".
:vartype language: str or ~flow.models.FlowLanguage
:ivar path:
:vartype path: str
:ivar entry:
:vartype entry: str
"""
_attribute_map = {
'inputs': {'key': 'inputs', 'type': '{FlowInputDefinition}'},
'outputs': {'key': 'outputs', 'type': '{FlowOutputDefinition}'},
'nodes': {'key': 'nodes', 'type': '[FlowNode]'},
'node_variants': {'key': 'node_variants', 'type': '{FlowNodeVariant}'},
'environment': {'key': 'environment', 'type': 'FlowEnvironment'},
'environment_variables': {'key': 'environment_variables', 'type': '{object}'},
'language': {'key': 'language', 'type': 'str'},
'path': {'key': 'path', 'type': 'str'},
'entry': {'key': 'entry', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword inputs: This is a dictionary.
:paramtype inputs: dict[str, ~flow.models.FlowInputDefinition]
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, ~flow.models.FlowOutputDefinition]
:keyword nodes:
:paramtype nodes: list[~flow.models.FlowNode]
:keyword node_variants: This is a dictionary.
:paramtype node_variants: dict[str, ~flow.models.FlowNodeVariant]
:keyword environment:
:paramtype environment: ~flow.models.FlowEnvironment
:keyword environment_variables: This is a dictionary.
:paramtype environment_variables: dict[str, any]
:keyword language: Possible values include: "Python", "CSharp", "TypeScript", "JavaScript".
:paramtype language: str or ~flow.models.FlowLanguage
:keyword path:
:paramtype path: str
:keyword entry:
:paramtype entry: str
"""
super(FlowSnapshot, self).__init__(**kwargs)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
self.nodes = kwargs.get('nodes', None)
self.node_variants = kwargs.get('node_variants', None)
self.environment = kwargs.get('environment', None)
self.environment_variables = kwargs.get('environment_variables', None)
self.language = kwargs.get('language', None)
self.path = kwargs.get('path', None)
self.entry = kwargs.get('entry', None)
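
# Illustrative sketch (hand-written; not AutoRest output): note that in this
# model's _attribute_map the wire keys "node_variants" and
# "environment_variables" stay snake_case, unlike most models in this file,
# so serialize() preserves them verbatim. Values below are hypothetical.
def _example_flow_snapshot():
    snapshot = FlowSnapshot(
        language="Python",                  # documented enum value
        path="flow.dag.yaml",               # hypothetical flow definition file
        environment_variables={"PF_LOG_LEVEL": "INFO"},
    )
    wire = snapshot.serialize()
    assert wire["environment_variables"] == {"PF_LOG_LEVEL": "INFO"}
    restored = FlowSnapshot.deserialize(wire)
    assert restored.path == snapshot.path
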
class FlowSubmitRunSettings(msrest.serialization.Model):
"""FlowSubmitRunSettings.
:ivar node_inputs: This is a dictionary.
:vartype node_inputs: dict[str, any]
:ivar run_mode: Possible values include: "Flow", "SingleNode", "FromNode", "BulkTest", "Eval",
"PairwiseEval", "ExperimentTest", "ExperimentEval".
:vartype run_mode: str or ~flow.models.FlowRunMode
:ivar tuning_node_names:
:vartype tuning_node_names: list[str]
:ivar tuning_node_settings: This is a dictionary.
:vartype tuning_node_settings: dict[str, ~flow.models.TuningNodeSetting]
:ivar baseline_variant_id:
:vartype baseline_variant_id: str
:ivar default_variant_id:
:vartype default_variant_id: str
:ivar variants: This is a dictionary.
:vartype variants: dict[str, list[~flow.models.Node]]
:ivar node_name:
:vartype node_name: str
:ivar is_default_variant:
:vartype is_default_variant: bool
:ivar node_variant_id:
:vartype node_variant_id: str
:ivar node_output_paths: Dictionary of :code:`<string>`.
:vartype node_output_paths: dict[str, str]
:ivar base_flow_run_id:
:vartype base_flow_run_id: str
:ivar flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
:vartype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
:ivar bulk_test_id:
:vartype bulk_test_id: str
:ivar evaluation_flow_run_settings: This is a dictionary.
:vartype evaluation_flow_run_settings: dict[str, ~flow.models.EvaluationFlowRunSettings]
:ivar bulk_test_flow_id:
:vartype bulk_test_flow_id: str
:ivar bulk_test_flow_run_ids:
:vartype bulk_test_flow_run_ids: list[str]
:ivar batch_inputs:
:vartype batch_inputs: list[dict[str, any]]
:ivar input_universal_link:
:vartype input_universal_link: str
:ivar data_inputs: This is a dictionary.
:vartype data_inputs: dict[str, str]
:ivar flow_run_output_directory:
:vartype flow_run_output_directory: str
:ivar connection_overrides:
:vartype connection_overrides: list[~flow.models.ConnectionOverrideSetting]
:ivar flow_run_display_name:
:vartype flow_run_display_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar runtime_name:
:vartype runtime_name: str
:ivar batch_data_input:
:vartype batch_data_input: ~flow.models.BatchDataInput
:ivar inputs_mapping: This is a dictionary.
:vartype inputs_mapping: dict[str, str]
:ivar connections: This is a dictionary.
:vartype connections: dict[str, dict[str, str]]
:ivar environment_variables: This is a dictionary.
:vartype environment_variables: dict[str, str]
:ivar output_data_store:
:vartype output_data_store: str
:ivar run_display_name_generation_type: Possible values include: "AutoAppend",
"UserProvidedMacro".
:vartype run_display_name_generation_type: str or ~flow.models.RunDisplayNameGenerationType
:ivar aml_compute_name:
:vartype aml_compute_name: str
:ivar worker_count:
:vartype worker_count: int
:ivar timeout_in_seconds:
:vartype timeout_in_seconds: int
:ivar promptflow_engine_type: Possible values include: "FastEngine", "ScalableEngine".
:vartype promptflow_engine_type: str or ~flow.models.PromptflowEngineType
"""
_attribute_map = {
'node_inputs': {'key': 'nodeInputs', 'type': '{object}'},
'run_mode': {'key': 'runMode', 'type': 'str'},
'tuning_node_names': {'key': 'tuningNodeNames', 'type': '[str]'},
'tuning_node_settings': {'key': 'tuningNodeSettings', 'type': '{TuningNodeSetting}'},
'baseline_variant_id': {'key': 'baselineVariantId', 'type': 'str'},
'default_variant_id': {'key': 'defaultVariantId', 'type': 'str'},
'variants': {'key': 'variants', 'type': '{[Node]}'},
'node_name': {'key': 'nodeName', 'type': 'str'},
'is_default_variant': {'key': 'isDefaultVariant', 'type': 'bool'},
'node_variant_id': {'key': 'nodeVariantId', 'type': 'str'},
'node_output_paths': {'key': 'nodeOutputPaths', 'type': '{str}'},
'base_flow_run_id': {'key': 'baseFlowRunId', 'type': 'str'},
'flow_test_infos': {'key': 'flowTestInfos', 'type': '{FlowTestInfo}'},
'bulk_test_id': {'key': 'bulkTestId', 'type': 'str'},
'evaluation_flow_run_settings': {'key': 'evaluationFlowRunSettings', 'type': '{EvaluationFlowRunSettings}'},
'bulk_test_flow_id': {'key': 'bulkTestFlowId', 'type': 'str'},
'bulk_test_flow_run_ids': {'key': 'bulkTestFlowRunIds', 'type': '[str]'},
'batch_inputs': {'key': 'batch_inputs', 'type': '[{object}]'},
'input_universal_link': {'key': 'inputUniversalLink', 'type': 'str'},
'data_inputs': {'key': 'dataInputs', 'type': '{str}'},
'flow_run_output_directory': {'key': 'flowRunOutputDirectory', 'type': 'str'},
'connection_overrides': {'key': 'connectionOverrides', 'type': '[ConnectionOverrideSetting]'},
'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'},
'connections': {'key': 'connections', 'type': '{{str}}'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'output_data_store': {'key': 'outputDataStore', 'type': 'str'},
'run_display_name_generation_type': {'key': 'runDisplayNameGenerationType', 'type': 'str'},
'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'},
'worker_count': {'key': 'workerCount', 'type': 'int'},
'timeout_in_seconds': {'key': 'timeoutInSeconds', 'type': 'int'},
'promptflow_engine_type': {'key': 'promptflowEngineType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword node_inputs: This is a dictionary.
:paramtype node_inputs: dict[str, any]
:keyword run_mode: Possible values include: "Flow", "SingleNode", "FromNode", "BulkTest",
"Eval", "PairwiseEval", "ExperimentTest", "ExperimentEval".
:paramtype run_mode: str or ~flow.models.FlowRunMode
:keyword tuning_node_names:
:paramtype tuning_node_names: list[str]
:keyword tuning_node_settings: This is a dictionary.
:paramtype tuning_node_settings: dict[str, ~flow.models.TuningNodeSetting]
:keyword baseline_variant_id:
:paramtype baseline_variant_id: str
:keyword default_variant_id:
:paramtype default_variant_id: str
:keyword variants: This is a dictionary.
:paramtype variants: dict[str, list[~flow.models.Node]]
:keyword node_name:
:paramtype node_name: str
:keyword is_default_variant:
:paramtype is_default_variant: bool
:keyword node_variant_id:
:paramtype node_variant_id: str
:keyword node_output_paths: Dictionary of :code:`<string>`.
:paramtype node_output_paths: dict[str, str]
:keyword base_flow_run_id:
:paramtype base_flow_run_id: str
:keyword flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
:paramtype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
:keyword bulk_test_id:
:paramtype bulk_test_id: str
:keyword evaluation_flow_run_settings: This is a dictionary.
:paramtype evaluation_flow_run_settings: dict[str, ~flow.models.EvaluationFlowRunSettings]
:keyword bulk_test_flow_id:
:paramtype bulk_test_flow_id: str
:keyword bulk_test_flow_run_ids:
:paramtype bulk_test_flow_run_ids: list[str]
:keyword batch_inputs:
:paramtype batch_inputs: list[dict[str, any]]
:keyword input_universal_link:
:paramtype input_universal_link: str
:keyword data_inputs: This is a dictionary.
:paramtype data_inputs: dict[str, str]
:keyword flow_run_output_directory:
:paramtype flow_run_output_directory: str
:keyword connection_overrides:
:paramtype connection_overrides: list[~flow.models.ConnectionOverrideSetting]
:keyword flow_run_display_name:
:paramtype flow_run_display_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword runtime_name:
:paramtype runtime_name: str
:keyword batch_data_input:
:paramtype batch_data_input: ~flow.models.BatchDataInput
:keyword inputs_mapping: This is a dictionary.
:paramtype inputs_mapping: dict[str, str]
:keyword connections: This is a dictionary.
:paramtype connections: dict[str, dict[str, str]]
:keyword environment_variables: This is a dictionary.
:paramtype environment_variables: dict[str, str]
:keyword output_data_store:
:paramtype output_data_store: str
:keyword run_display_name_generation_type: Possible values include: "AutoAppend",
"UserProvidedMacro".
:paramtype run_display_name_generation_type: str or ~flow.models.RunDisplayNameGenerationType
:keyword aml_compute_name:
:paramtype aml_compute_name: str
:keyword worker_count:
:paramtype worker_count: int
:keyword timeout_in_seconds:
:paramtype timeout_in_seconds: int
:keyword promptflow_engine_type: Possible values include: "FastEngine", "ScalableEngine".
:paramtype promptflow_engine_type: str or ~flow.models.PromptflowEngineType
"""
super(FlowSubmitRunSettings, self).__init__(**kwargs)
self.node_inputs = kwargs.get('node_inputs', None)
self.run_mode = kwargs.get('run_mode', None)
self.tuning_node_names = kwargs.get('tuning_node_names', None)
self.tuning_node_settings = kwargs.get('tuning_node_settings', None)
self.baseline_variant_id = kwargs.get('baseline_variant_id', None)
self.default_variant_id = kwargs.get('default_variant_id', None)
self.variants = kwargs.get('variants', None)
self.node_name = kwargs.get('node_name', None)
self.is_default_variant = kwargs.get('is_default_variant', None)
self.node_variant_id = kwargs.get('node_variant_id', None)
self.node_output_paths = kwargs.get('node_output_paths', None)
self.base_flow_run_id = kwargs.get('base_flow_run_id', None)
self.flow_test_infos = kwargs.get('flow_test_infos', None)
self.bulk_test_id = kwargs.get('bulk_test_id', None)
self.evaluation_flow_run_settings = kwargs.get('evaluation_flow_run_settings', None)
self.bulk_test_flow_id = kwargs.get('bulk_test_flow_id', None)
self.bulk_test_flow_run_ids = kwargs.get('bulk_test_flow_run_ids', None)
self.batch_inputs = kwargs.get('batch_inputs', None)
self.input_universal_link = kwargs.get('input_universal_link', None)
self.data_inputs = kwargs.get('data_inputs', None)
self.flow_run_output_directory = kwargs.get('flow_run_output_directory', None)
self.connection_overrides = kwargs.get('connection_overrides', None)
self.flow_run_display_name = kwargs.get('flow_run_display_name', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
self.runtime_name = kwargs.get('runtime_name', None)
self.batch_data_input = kwargs.get('batch_data_input', None)
self.inputs_mapping = kwargs.get('inputs_mapping', None)
self.connections = kwargs.get('connections', None)
self.environment_variables = kwargs.get('environment_variables', None)
self.output_data_store = kwargs.get('output_data_store', None)
self.run_display_name_generation_type = kwargs.get('run_display_name_generation_type', None)
self.aml_compute_name = kwargs.get('aml_compute_name', None)
self.worker_count = kwargs.get('worker_count', None)
self.timeout_in_seconds = kwargs.get('timeout_in_seconds', None)
self.promptflow_engine_type = kwargs.get('promptflow_engine_type', None)
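
# Illustrative sketch (hand-written; not AutoRest output): a minimal
# bulk-test style submission. Note the mixed wire casing in _attribute_map:
# most keys are camelCase, but "batch_inputs" is snake_case on the wire.
# All values below, including the inputs-mapping expression, are hypothetical.
def _example_flow_submit_run_settings():
    settings = FlowSubmitRunSettings(
        run_mode="BulkTest",                       # documented enum value
        batch_inputs=[{"question": "hello"}],      # hypothetical input row
        inputs_mapping={"question": "${data.question}"},
        connections={"llm_node": {"connection": "my-aoai-connection"}},
        timeout_in_seconds=600,
    )
    wire = settings.serialize()
    assert wire["runMode"] == "BulkTest"
    assert wire["batch_inputs"] == [{"question": "hello"}]
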
class FlowTestInfo(msrest.serialization.Model):
"""FlowTestInfo.
:ivar variant_id:
:vartype variant_id: str
:ivar tuning_node_name:
:vartype tuning_node_name: str
:ivar flow_run_id:
:vartype flow_run_id: str
:ivar flow_test_storage_setting:
:vartype flow_test_storage_setting: ~flow.models.FlowTestStorageSetting
:ivar flow_run_type: Possible values include: "FlowRun", "EvaluationRun",
"PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun".
:vartype flow_run_type: str or ~flow.models.FlowRunTypeEnum
:ivar variant_run_id:
:vartype variant_run_id: str
:ivar evaluation_name:
:vartype evaluation_name: str
:ivar output_universal_link:
:vartype output_universal_link: str
"""
_attribute_map = {
'variant_id': {'key': 'variantId', 'type': 'str'},
'tuning_node_name': {'key': 'tuningNodeName', 'type': 'str'},
'flow_run_id': {'key': 'flowRunId', 'type': 'str'},
'flow_test_storage_setting': {'key': 'flowTestStorageSetting', 'type': 'FlowTestStorageSetting'},
'flow_run_type': {'key': 'flowRunType', 'type': 'str'},
'variant_run_id': {'key': 'variantRunId', 'type': 'str'},
'evaluation_name': {'key': 'evaluationName', 'type': 'str'},
'output_universal_link': {'key': 'outputUniversalLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword variant_id:
:paramtype variant_id: str
:keyword tuning_node_name:
:paramtype tuning_node_name: str
:keyword flow_run_id:
:paramtype flow_run_id: str
:keyword flow_test_storage_setting:
:paramtype flow_test_storage_setting: ~flow.models.FlowTestStorageSetting
:keyword flow_run_type: Possible values include: "FlowRun", "EvaluationRun",
"PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun".
:paramtype flow_run_type: str or ~flow.models.FlowRunTypeEnum
:keyword variant_run_id:
:paramtype variant_run_id: str
:keyword evaluation_name:
:paramtype evaluation_name: str
:keyword output_universal_link:
:paramtype output_universal_link: str
"""
super(FlowTestInfo, self).__init__(**kwargs)
self.variant_id = kwargs.get('variant_id', None)
self.tuning_node_name = kwargs.get('tuning_node_name', None)
self.flow_run_id = kwargs.get('flow_run_id', None)
self.flow_test_storage_setting = kwargs.get('flow_test_storage_setting', None)
self.flow_run_type = kwargs.get('flow_run_type', None)
self.variant_run_id = kwargs.get('variant_run_id', None)
self.evaluation_name = kwargs.get('evaluation_name', None)
self.output_universal_link = kwargs.get('output_universal_link', None)
class FlowTestStorageSetting(msrest.serialization.Model):
"""FlowTestStorageSetting.
:ivar storage_account_name:
:vartype storage_account_name: str
:ivar blob_container_name:
:vartype blob_container_name: str
:ivar flow_artifacts_root_path:
:vartype flow_artifacts_root_path: str
:ivar output_datastore_name:
:vartype output_datastore_name: str
"""
_attribute_map = {
'storage_account_name': {'key': 'storageAccountName', 'type': 'str'},
'blob_container_name': {'key': 'blobContainerName', 'type': 'str'},
'flow_artifacts_root_path': {'key': 'flowArtifactsRootPath', 'type': 'str'},
'output_datastore_name': {'key': 'outputDatastoreName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword storage_account_name:
:paramtype storage_account_name: str
:keyword blob_container_name:
:paramtype blob_container_name: str
:keyword flow_artifacts_root_path:
:paramtype flow_artifacts_root_path: str
:keyword output_datastore_name:
:paramtype output_datastore_name: str
"""
super(FlowTestStorageSetting, self).__init__(**kwargs)
self.storage_account_name = kwargs.get('storage_account_name', None)
self.blob_container_name = kwargs.get('blob_container_name', None)
self.flow_artifacts_root_path = kwargs.get('flow_artifacts_root_path', None)
self.output_datastore_name = kwargs.get('output_datastore_name', None)
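
# Illustrative sketch (hand-written; not AutoRest output): FlowTestInfo
# composes a nested FlowTestStorageSetting; msrest serializes nested models
# recursively, using the model types declared in _attribute_map. Values are
# hypothetical.
def _example_flow_test_info_with_storage():
    info = FlowTestInfo(
        variant_id="variant_0",
        flow_run_type="FlowRun",                   # documented enum value
        flow_test_storage_setting=FlowTestStorageSetting(
            storage_account_name="mystorageaccount",
            blob_container_name="flow-artifacts",
        ),
    )
    wire = info.serialize()
    # The nested model becomes a nested dict keyed by its own wire names.
    assert wire["flowTestStorageSetting"]["blobContainerName"] == "flow-artifacts"
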
class FlowToolsDto(msrest.serialization.Model):
"""FlowToolsDto.
:ivar package: This is a dictionary.
:vartype package: dict[str, ~flow.models.Tool]
:ivar code: This is a dictionary.
:vartype code: dict[str, ~flow.models.Tool]
:ivar errors: This is a dictionary.
:vartype errors: dict[str, ~flow.models.ErrorResponse]
"""
_attribute_map = {
'package': {'key': 'package', 'type': '{Tool}'},
'code': {'key': 'code', 'type': '{Tool}'},
'errors': {'key': 'errors', 'type': '{ErrorResponse}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword package: This is a dictionary.
:paramtype package: dict[str, ~flow.models.Tool]
:keyword code: This is a dictionary.
:paramtype code: dict[str, ~flow.models.Tool]
:keyword errors: This is a dictionary.
:paramtype errors: dict[str, ~flow.models.ErrorResponse]
"""
super(FlowToolsDto, self).__init__(**kwargs)
self.package = kwargs.get('package', None)
self.code = kwargs.get('code', None)
self.errors = kwargs.get('errors', None)
class FlowToolSettingParameter(msrest.serialization.Model):
"""FlowToolSettingParameter.
:ivar type:
:vartype type: list[str or ~flow.models.ValueType]
:ivar default:
:vartype default: str
:ivar advanced:
:vartype advanced: bool
:ivar enum:
:vartype enum: list[any]
:ivar model_list:
:vartype model_list: list[str]
:ivar text_box_size:
:vartype text_box_size: int
:ivar capabilities:
:vartype capabilities: ~flow.models.AzureOpenAIModelCapabilities
:ivar allow_manual_entry:
:vartype allow_manual_entry: bool
:ivar ui_hints: This is a dictionary.
:vartype ui_hints: dict[str, any]
"""
_attribute_map = {
'type': {'key': 'type', 'type': '[str]'},
'default': {'key': 'default', 'type': 'str'},
'advanced': {'key': 'advanced', 'type': 'bool'},
'enum': {'key': 'enum', 'type': '[object]'},
'model_list': {'key': 'model_list', 'type': '[str]'},
'text_box_size': {'key': 'text_box_size', 'type': 'int'},
'capabilities': {'key': 'capabilities', 'type': 'AzureOpenAIModelCapabilities'},
'allow_manual_entry': {'key': 'allow_manual_entry', 'type': 'bool'},
'ui_hints': {'key': 'ui_hints', 'type': '{object}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type:
:paramtype type: list[str or ~flow.models.ValueType]
:keyword default:
:paramtype default: str
:keyword advanced:
:paramtype advanced: bool
:keyword enum:
:paramtype enum: list[any]
:keyword model_list:
:paramtype model_list: list[str]
:keyword text_box_size:
:paramtype text_box_size: int
:keyword capabilities:
:paramtype capabilities: ~flow.models.AzureOpenAIModelCapabilities
:keyword allow_manual_entry:
:paramtype allow_manual_entry: bool
:keyword ui_hints: This is a dictionary.
:paramtype ui_hints: dict[str, any]
"""
super(FlowToolSettingParameter, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.default = kwargs.get('default', None)
self.advanced = kwargs.get('advanced', None)
self.enum = kwargs.get('enum', None)
self.model_list = kwargs.get('model_list', None)
self.text_box_size = kwargs.get('text_box_size', None)
self.capabilities = kwargs.get('capabilities', None)
self.allow_manual_entry = kwargs.get('allow_manual_entry', None)
self.ui_hints = kwargs.get('ui_hints', None)
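
# Illustrative sketch (hand-written; not AutoRest output): describes one
# configurable tool input. The wire keys here ("model_list", "text_box_size",
# "allow_manual_entry", "ui_hints") are snake_case by contract. "string" is a
# hypothetical ValueType value; the real enum lives in ~flow.models.ValueType.
def _example_flow_tool_setting_parameter():
    param = FlowToolSettingParameter(
        type=["string"],                   # hypothetical value type
        default="gpt-35-turbo",            # hypothetical default
        allow_manual_entry=True,
        ui_hints={"index": 0},
    )
    wire = param.serialize()
    assert wire["allow_manual_entry"] is True
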
class FlowVariantNode(msrest.serialization.Model):
"""FlowVariantNode.
:ivar node:
:vartype node: ~flow.models.FlowNode
:ivar description:
:vartype description: str
"""
_attribute_map = {
'node': {'key': 'node', 'type': 'FlowNode'},
'description': {'key': 'description', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword node:
:paramtype node: ~flow.models.FlowNode
:keyword description:
:paramtype description: str
"""
super(FlowVariantNode, self).__init__(**kwargs)
self.node = kwargs.get('node', None)
self.description = kwargs.get('description', None)
class ForecastHorizon(msrest.serialization.Model):
"""ForecastHorizon.
:ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.ForecastHorizonMode
:ivar value:
:vartype value: int
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'value': {'key': 'value', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom".
:paramtype mode: str or ~flow.models.ForecastHorizonMode
:keyword value:
:paramtype value: int
"""
super(ForecastHorizon, self).__init__(**kwargs)
self.mode = kwargs.get('mode', None)
self.value = kwargs.get('value', None)
class ForecastingSettings(msrest.serialization.Model):
"""ForecastingSettings.
:ivar country_or_region_for_holidays:
:vartype country_or_region_for_holidays: str
:ivar time_column_name:
:vartype time_column_name: str
:ivar target_lags:
:vartype target_lags: ~flow.models.TargetLags
:ivar target_rolling_window_size:
:vartype target_rolling_window_size: ~flow.models.TargetRollingWindowSize
:ivar forecast_horizon:
:vartype forecast_horizon: ~flow.models.ForecastHorizon
:ivar time_series_id_column_names:
:vartype time_series_id_column_names: list[str]
:ivar frequency:
:vartype frequency: str
:ivar feature_lags:
:vartype feature_lags: str
:ivar seasonality:
:vartype seasonality: ~flow.models.Seasonality
:ivar short_series_handling_config: Possible values include: "Auto", "Pad", "Drop".
:vartype short_series_handling_config: str or ~flow.models.ShortSeriesHandlingConfiguration
:ivar use_stl: Possible values include: "Season", "SeasonTrend".
:vartype use_stl: str or ~flow.models.UseStl
:ivar target_aggregate_function: Possible values include: "Sum", "Max", "Min", "Mean".
:vartype target_aggregate_function: str or ~flow.models.TargetAggregationFunction
:ivar cv_step_size:
:vartype cv_step_size: int
:ivar features_unknown_at_forecast_time:
:vartype features_unknown_at_forecast_time: list[str]
"""
_attribute_map = {
'country_or_region_for_holidays': {'key': 'countryOrRegionForHolidays', 'type': 'str'},
'time_column_name': {'key': 'timeColumnName', 'type': 'str'},
'target_lags': {'key': 'targetLags', 'type': 'TargetLags'},
'target_rolling_window_size': {'key': 'targetRollingWindowSize', 'type': 'TargetRollingWindowSize'},
'forecast_horizon': {'key': 'forecastHorizon', 'type': 'ForecastHorizon'},
'time_series_id_column_names': {'key': 'timeSeriesIdColumnNames', 'type': '[str]'},
'frequency': {'key': 'frequency', 'type': 'str'},
'feature_lags': {'key': 'featureLags', 'type': 'str'},
'seasonality': {'key': 'seasonality', 'type': 'Seasonality'},
'short_series_handling_config': {'key': 'shortSeriesHandlingConfig', 'type': 'str'},
'use_stl': {'key': 'useStl', 'type': 'str'},
'target_aggregate_function': {'key': 'targetAggregateFunction', 'type': 'str'},
'cv_step_size': {'key': 'cvStepSize', 'type': 'int'},
'features_unknown_at_forecast_time': {'key': 'featuresUnknownAtForecastTime', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword country_or_region_for_holidays:
:paramtype country_or_region_for_holidays: str
:keyword time_column_name:
:paramtype time_column_name: str
:keyword target_lags:
:paramtype target_lags: ~flow.models.TargetLags
:keyword target_rolling_window_size:
:paramtype target_rolling_window_size: ~flow.models.TargetRollingWindowSize
:keyword forecast_horizon:
:paramtype forecast_horizon: ~flow.models.ForecastHorizon
:keyword time_series_id_column_names:
:paramtype time_series_id_column_names: list[str]
:keyword frequency:
:paramtype frequency: str
:keyword feature_lags:
:paramtype feature_lags: str
:keyword seasonality:
:paramtype seasonality: ~flow.models.Seasonality
:keyword short_series_handling_config: Possible values include: "Auto", "Pad", "Drop".
:paramtype short_series_handling_config: str or ~flow.models.ShortSeriesHandlingConfiguration
:keyword use_stl: Possible values include: "Season", "SeasonTrend".
:paramtype use_stl: str or ~flow.models.UseStl
:keyword target_aggregate_function: Possible values include: "Sum", "Max", "Min", "Mean".
:paramtype target_aggregate_function: str or ~flow.models.TargetAggregationFunction
:keyword cv_step_size:
:paramtype cv_step_size: int
:keyword features_unknown_at_forecast_time:
:paramtype features_unknown_at_forecast_time: list[str]
"""
super(ForecastingSettings, self).__init__(**kwargs)
self.country_or_region_for_holidays = kwargs.get('country_or_region_for_holidays', None)
self.time_column_name = kwargs.get('time_column_name', None)
self.target_lags = kwargs.get('target_lags', None)
self.target_rolling_window_size = kwargs.get('target_rolling_window_size', None)
self.forecast_horizon = kwargs.get('forecast_horizon', None)
self.time_series_id_column_names = kwargs.get('time_series_id_column_names', None)
self.frequency = kwargs.get('frequency', None)
self.feature_lags = kwargs.get('feature_lags', None)
self.seasonality = kwargs.get('seasonality', None)
self.short_series_handling_config = kwargs.get('short_series_handling_config', None)
self.use_stl = kwargs.get('use_stl', None)
self.target_aggregate_function = kwargs.get('target_aggregate_function', None)
self.cv_step_size = kwargs.get('cv_step_size', None)
self.features_unknown_at_forecast_time = kwargs.get('features_unknown_at_forecast_time', None)
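
# Illustrative sketch (hand-written; not AutoRest output): forecasting
# settings compose smaller mode/value models such as ForecastHorizon. The
# frequency string is a hypothetical pandas-style offset alias.
def _example_forecasting_settings():
    settings = ForecastingSettings(
        time_column_name="timestamp",
        frequency="D",
        forecast_horizon=ForecastHorizon(mode="Custom", value=14),
        short_series_handling_config="Pad",        # documented enum value
    )
    wire = settings.serialize()
    assert wire["forecastHorizon"] == {"mode": "Custom", "value": 14}
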
class GeneralSettings(msrest.serialization.Model):
"""GeneralSettings.
:ivar primary_metric: Possible values include: "AUCWeighted", "Accuracy", "NormMacroRecall",
"AveragePrecisionScoreWeighted", "PrecisionScoreWeighted", "SpearmanCorrelation",
"NormalizedRootMeanSquaredError", "R2Score", "NormalizedMeanAbsoluteError",
"NormalizedRootMeanSquaredLogError", "MeanAveragePrecision", "Iou".
:vartype primary_metric: str or ~flow.models.PrimaryMetrics
:ivar task_type: Possible values include: "Classification", "Regression", "Forecasting",
"ImageClassification", "ImageClassificationMultilabel", "ImageObjectDetection",
"ImageInstanceSegmentation", "TextClassification", "TextMultiLabeling", "TextNER",
"TextClassificationMultilabel".
:vartype task_type: str or ~flow.models.TaskType
:ivar log_verbosity: Possible values include: "NotSet", "Debug", "Info", "Warning", "Error",
"Critical".
:vartype log_verbosity: str or ~flow.models.LogVerbosity
"""
_attribute_map = {
'primary_metric': {'key': 'primaryMetric', 'type': 'str'},
'task_type': {'key': 'taskType', 'type': 'str'},
'log_verbosity': {'key': 'logVerbosity', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword primary_metric: Possible values include: "AUCWeighted", "Accuracy", "NormMacroRecall",
"AveragePrecisionScoreWeighted", "PrecisionScoreWeighted", "SpearmanCorrelation",
"NormalizedRootMeanSquaredError", "R2Score", "NormalizedMeanAbsoluteError",
"NormalizedRootMeanSquaredLogError", "MeanAveragePrecision", "Iou".
:paramtype primary_metric: str or ~flow.models.PrimaryMetrics
:keyword task_type: Possible values include: "Classification", "Regression", "Forecasting",
"ImageClassification", "ImageClassificationMultilabel", "ImageObjectDetection",
"ImageInstanceSegmentation", "TextClassification", "TextMultiLabeling", "TextNER",
"TextClassificationMultilabel".
:paramtype task_type: str or ~flow.models.TaskType
:keyword log_verbosity: Possible values include: "NotSet", "Debug", "Info", "Warning", "Error",
"Critical".
:paramtype log_verbosity: str or ~flow.models.LogVerbosity
"""
super(GeneralSettings, self).__init__(**kwargs)
self.primary_metric = kwargs.get('primary_metric', None)
self.task_type = kwargs.get('task_type', None)
self.log_verbosity = kwargs.get('log_verbosity', None)
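
# Illustrative sketch (hand-written; not AutoRest output): the three
# AutoML-style knobs are all plain enum strings on the wire; the values below
# are drawn from the documented enum lists.
def _example_general_settings():
    settings = GeneralSettings(
        primary_metric="Accuracy",
        task_type="Classification",
        log_verbosity="Info",
    )
    # serialize() emits only the attributes that were set, so the result is
    # exactly these three wire keys.
    assert settings.serialize() == {
        "primaryMetric": "Accuracy",
        "taskType": "Classification",
        "logVerbosity": "Info",
    }
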
class GeneratePipelineComponentRequest(msrest.serialization.Model):
"""GeneratePipelineComponentRequest.
:ivar name:
:vartype name: str
:ivar display_name:
:vartype display_name: str
:ivar module_scope: Possible values include: "All", "Global", "Workspace", "Anonymous", "Step",
"Draft", "Feed", "Registry", "SystemAutoCreated".
:vartype module_scope: str or ~flow.models.ModuleScope
:ivar is_deterministic:
:vartype is_deterministic: bool
:ivar category:
:vartype category: str
:ivar version:
:vartype version: str
:ivar set_as_default_version:
:vartype set_as_default_version: bool
:ivar registry_name:
:vartype registry_name: str
:ivar graph:
:vartype graph: ~flow.models.GraphDraftEntity
:ivar pipeline_run_settings:
:vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:ivar module_node_run_settings:
:vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:ivar module_node_ui_input_settings:
:vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar continue_run_on_step_failure:
:vartype continue_run_on_step_failure: bool
:ivar description:
:vartype description: str
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar enforce_rerun:
:vartype enforce_rerun: bool
:ivar dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2",
"DatasetInDesignerUI", "AssetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI",
"AssetInDpv2WithDatasetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI",
"DatasetAndAssetInDpv2WithDatasetInDesignerUI", "Dataset", "Asset".
:vartype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'module_scope': {'key': 'moduleScope', 'type': 'str'},
'is_deterministic': {'key': 'isDeterministic', 'type': 'bool'},
'category': {'key': 'category', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'set_as_default_version': {'key': 'setAsDefaultVersion', 'type': 'bool'},
'registry_name': {'key': 'registryName', 'type': 'str'},
'graph': {'key': 'graph', 'type': 'GraphDraftEntity'},
'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'},
'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'},
'tags': {'key': 'tags', 'type': '{str}'},
'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
'dataset_access_modes': {'key': 'datasetAccessModes', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword display_name:
:paramtype display_name: str
:keyword module_scope: Possible values include: "All", "Global", "Workspace", "Anonymous",
"Step", "Draft", "Feed", "Registry", "SystemAutoCreated".
:paramtype module_scope: str or ~flow.models.ModuleScope
:keyword is_deterministic:
:paramtype is_deterministic: bool
:keyword category:
:paramtype category: str
:keyword version:
:paramtype version: str
:keyword set_as_default_version:
:paramtype set_as_default_version: bool
:keyword registry_name:
:paramtype registry_name: str
:keyword graph:
:paramtype graph: ~flow.models.GraphDraftEntity
:keyword pipeline_run_settings:
:paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:keyword module_node_run_settings:
:paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:keyword module_node_ui_input_settings:
:paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword continue_run_on_step_failure:
:paramtype continue_run_on_step_failure: bool
:keyword description:
:paramtype description: str
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword enforce_rerun:
:paramtype enforce_rerun: bool
:keyword dataset_access_modes: Possible values include: "Default", "DatasetInDpv2",
"AssetInDpv2", "DatasetInDesignerUI", "AssetInDesignerUI",
"DatasetInDpv2WithDatasetInDesignerUI", "AssetInDpv2WithDatasetInDesignerUI",
"AssetInDpv2WithAssetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI", "Dataset",
"Asset".
:paramtype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
super(GeneratePipelineComponentRequest, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.display_name = kwargs.get('display_name', None)
self.module_scope = kwargs.get('module_scope', None)
self.is_deterministic = kwargs.get('is_deterministic', None)
self.category = kwargs.get('category', None)
self.version = kwargs.get('version', None)
self.set_as_default_version = kwargs.get('set_as_default_version', None)
self.registry_name = kwargs.get('registry_name', None)
self.graph = kwargs.get('graph', None)
self.pipeline_run_settings = kwargs.get('pipeline_run_settings', None)
self.module_node_run_settings = kwargs.get('module_node_run_settings', None)
self.module_node_ui_input_settings = kwargs.get('module_node_ui_input_settings', None)
self.tags = kwargs.get('tags', None)
self.continue_run_on_step_failure = kwargs.get('continue_run_on_step_failure', None)
self.description = kwargs.get('description', None)
self.properties = kwargs.get('properties', None)
self.enforce_rerun = kwargs.get('enforce_rerun', None)
self.dataset_access_modes = kwargs.get('dataset_access_modes', None)
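
# Illustrative sketch (hand-written; not AutoRest output): a minimal
# component-generation request; the graph itself (a GraphDraftEntity) is
# omitted here for brevity. Values below are hypothetical.
def _example_generate_pipeline_component_request():
    request = GeneratePipelineComponentRequest(
        name="my_component",
        display_name="My Component",
        module_scope="Workspace",          # documented enum value
        version="1",
        set_as_default_version=True,
        tags={"team": "demo"},
    )
    assert request.serialize()["setAsDefaultVersion"] is True
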
class GenerateToolMetaRequest(msrest.serialization.Model):
"""GenerateToolMetaRequest.
:ivar tools: This is a dictionary.
:vartype tools: dict[str, ~flow.models.ToolSourceMeta]
:ivar working_dir:
:vartype working_dir: str
"""
_attribute_map = {
'tools': {'key': 'tools', 'type': '{ToolSourceMeta}'},
'working_dir': {'key': 'working_dir', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword tools: This is a dictionary.
:paramtype tools: dict[str, ~flow.models.ToolSourceMeta]
:keyword working_dir:
:paramtype working_dir: str
"""
super(GenerateToolMetaRequest, self).__init__(**kwargs)
self.tools = kwargs.get('tools', None)
self.working_dir = kwargs.get('working_dir', None)
class GetDynamicListRequest(msrest.serialization.Model):
"""GetDynamicListRequest.
:ivar func_path:
:vartype func_path: str
:ivar func_kwargs: This is a dictionary.
:vartype func_kwargs: dict[str, any]
"""
_attribute_map = {
'func_path': {'key': 'func_path', 'type': 'str'},
'func_kwargs': {'key': 'func_kwargs', 'type': '{object}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword func_path:
:paramtype func_path: str
:keyword func_kwargs: This is a dictionary.
:paramtype func_kwargs: dict[str, any]
"""
super(GetDynamicListRequest, self).__init__(**kwargs)
self.func_path = kwargs.get('func_path', None)
self.func_kwargs = kwargs.get('func_kwargs', None)
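
# Illustrative sketch (hand-written; not AutoRest output): used to invoke a
# tool's dynamic-list function by dotted path with keyword arguments. The
# function path and kwargs below are hypothetical.
def _example_get_dynamic_list_request():
    request = GetDynamicListRequest(
        func_path="my_tool_package.utils.list_indexes",
        func_kwargs={"prefix": "dev-"},
    )
    # Both wire keys are snake_case by contract (see _attribute_map).
    assert request.serialize() == {
        "func_path": "my_tool_package.utils.list_indexes",
        "func_kwargs": {"prefix": "dev-"},
    }
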
class GetRunDataResultDto(msrest.serialization.Model):
"""GetRunDataResultDto.
:ivar run_metadata:
:vartype run_metadata: ~flow.models.RunDto
:ivar run_definition: Anything.
:vartype run_definition: any
:ivar job_specification: Anything.
:vartype job_specification: any
:ivar system_settings: Dictionary of :code:`<string>`.
:vartype system_settings: dict[str, str]
"""
_attribute_map = {
'run_metadata': {'key': 'runMetadata', 'type': 'RunDto'},
'run_definition': {'key': 'runDefinition', 'type': 'object'},
'job_specification': {'key': 'jobSpecification', 'type': 'object'},
'system_settings': {'key': 'systemSettings', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword run_metadata:
:paramtype run_metadata: ~flow.models.RunDto
:keyword run_definition: Anything.
:paramtype run_definition: any
:keyword job_specification: Anything.
:paramtype job_specification: any
:keyword system_settings: Dictionary of :code:`<string>`.
:paramtype system_settings: dict[str, str]
"""
super(GetRunDataResultDto, self).__init__(**kwargs)
self.run_metadata = kwargs.get('run_metadata', None)
self.run_definition = kwargs.get('run_definition', None)
self.job_specification = kwargs.get('job_specification', None)
self.system_settings = kwargs.get('system_settings', None)
class GetTrainingSessionDto(msrest.serialization.Model):
"""GetTrainingSessionDto.
:ivar properties:
:vartype properties: ~flow.models.SessionProperties
:ivar compute:
:vartype compute: ~flow.models.ComputeContract
"""
_attribute_map = {
'properties': {'key': 'properties', 'type': 'SessionProperties'},
'compute': {'key': 'compute', 'type': 'ComputeContract'},
}
def __init__(
self,
**kwargs
):
"""
:keyword properties:
:paramtype properties: ~flow.models.SessionProperties
:keyword compute:
:paramtype compute: ~flow.models.ComputeContract
"""
super(GetTrainingSessionDto, self).__init__(**kwargs)
self.properties = kwargs.get('properties', None)
self.compute = kwargs.get('compute', None)
class GlobalJobDispatcherConfiguration(msrest.serialization.Model):
"""GlobalJobDispatcherConfiguration.
:ivar vm_size:
:vartype vm_size: list[str]
:ivar compute_type: Possible values include: "AmlCompute", "AmlK8s".
:vartype compute_type: str or ~flow.models.GlobalJobDispatcherSupportedComputeType
:ivar region:
:vartype region: list[str]
:ivar my_resource_only:
:vartype my_resource_only: bool
:ivar redispatch_allowed:
:vartype redispatch_allowed: bool
:ivar low_priority_vm_tolerant:
:vartype low_priority_vm_tolerant: bool
:ivar vc_list:
:vartype vc_list: list[str]
:ivar plan_id:
:vartype plan_id: str
:ivar plan_region_id:
:vartype plan_region_id: str
:ivar vc_block_list:
:vartype vc_block_list: list[str]
:ivar cluster_block_list:
:vartype cluster_block_list: list[str]
"""
_attribute_map = {
'vm_size': {'key': 'vmSize', 'type': '[str]'},
'compute_type': {'key': 'computeType', 'type': 'str'},
'region': {'key': 'region', 'type': '[str]'},
'my_resource_only': {'key': 'myResourceOnly', 'type': 'bool'},
'redispatch_allowed': {'key': 'redispatchAllowed', 'type': 'bool'},
'low_priority_vm_tolerant': {'key': 'lowPriorityVMTolerant', 'type': 'bool'},
'vc_list': {'key': 'vcList', 'type': '[str]'},
'plan_id': {'key': 'planId', 'type': 'str'},
'plan_region_id': {'key': 'planRegionId', 'type': 'str'},
'vc_block_list': {'key': 'vcBlockList', 'type': '[str]'},
'cluster_block_list': {'key': 'clusterBlockList', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword vm_size:
:paramtype vm_size: list[str]
:keyword compute_type: Possible values include: "AmlCompute", "AmlK8s".
:paramtype compute_type: str or ~flow.models.GlobalJobDispatcherSupportedComputeType
:keyword region:
:paramtype region: list[str]
:keyword my_resource_only:
:paramtype my_resource_only: bool
:keyword redispatch_allowed:
:paramtype redispatch_allowed: bool
:keyword low_priority_vm_tolerant:
:paramtype low_priority_vm_tolerant: bool
:keyword vc_list:
:paramtype vc_list: list[str]
:keyword plan_id:
:paramtype plan_id: str
:keyword plan_region_id:
:paramtype plan_region_id: str
:keyword vc_block_list:
:paramtype vc_block_list: list[str]
:keyword cluster_block_list:
:paramtype cluster_block_list: list[str]
"""
super(GlobalJobDispatcherConfiguration, self).__init__(**kwargs)
self.vm_size = kwargs.get('vm_size', None)
self.compute_type = kwargs.get('compute_type', None)
self.region = kwargs.get('region', None)
self.my_resource_only = kwargs.get('my_resource_only', None)
self.redispatch_allowed = kwargs.get('redispatch_allowed', None)
self.low_priority_vm_tolerant = kwargs.get('low_priority_vm_tolerant', None)
self.vc_list = kwargs.get('vc_list', None)
self.plan_id = kwargs.get('plan_id', None)
self.plan_region_id = kwargs.get('plan_region_id', None)
self.vc_block_list = kwargs.get('vc_block_list', None)
self.cluster_block_list = kwargs.get('cluster_block_list', None)
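
# Illustrative sketch (hand-written; not AutoRest output): dispatcher
# configuration is mostly lists of candidate resources plus boolean policy
# flags. Values below are hypothetical.
def _example_global_job_dispatcher_configuration():
    config = GlobalJobDispatcherConfiguration(
        vm_size=["Standard_D2_v2", "Standard_D4_v2"],
        compute_type="AmlCompute",          # documented enum value
        region=["eastus", "westus2"],
        low_priority_vm_tolerant=True,
    )
    wire = config.serialize()
    # Note the unusual capitalization of this wire key in _attribute_map.
    assert wire["lowPriorityVMTolerant"] is True
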
class GlobsOptions(msrest.serialization.Model):
"""GlobsOptions.
:ivar glob_patterns:
:vartype glob_patterns: list[str]
"""
_attribute_map = {
'glob_patterns': {'key': 'globPatterns', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword glob_patterns:
:paramtype glob_patterns: list[str]
"""
super(GlobsOptions, self).__init__(**kwargs)
self.glob_patterns = kwargs.get('glob_patterns', None)
class GraphAnnotationNode(msrest.serialization.Model):
"""GraphAnnotationNode.
:ivar id:
:vartype id: str
:ivar content:
:vartype content: str
:ivar mentioned_node_names:
:vartype mentioned_node_names: list[str]
:ivar structured_content:
:vartype structured_content: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'content': {'key': 'content', 'type': 'str'},
'mentioned_node_names': {'key': 'mentionedNodeNames', 'type': '[str]'},
'structured_content': {'key': 'structuredContent', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword content:
:paramtype content: str
:keyword mentioned_node_names:
:paramtype mentioned_node_names: list[str]
:keyword structured_content:
:paramtype structured_content: str
"""
super(GraphAnnotationNode, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.content = kwargs.get('content', None)
self.mentioned_node_names = kwargs.get('mentioned_node_names', None)
self.structured_content = kwargs.get('structured_content', None)
class GraphControlNode(msrest.serialization.Model):
"""GraphControlNode.
:ivar id:
:vartype id: str
:ivar control_type: The only acceptable values to pass in are None and "IfElse". The default
value is None.
:vartype control_type: str
:ivar control_parameter:
:vartype control_parameter: ~flow.models.ParameterAssignment
:ivar run_attribution:
:vartype run_attribution: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'control_type': {'key': 'controlType', 'type': 'str'},
'control_parameter': {'key': 'controlParameter', 'type': 'ParameterAssignment'},
'run_attribution': {'key': 'runAttribution', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword control_type: The only acceptable values to pass in are None and "IfElse". The
default value is None.
:paramtype control_type: str
:keyword control_parameter:
:paramtype control_parameter: ~flow.models.ParameterAssignment
:keyword run_attribution:
:paramtype run_attribution: str
"""
super(GraphControlNode, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.control_type = kwargs.get('control_type', None)
self.control_parameter = kwargs.get('control_parameter', None)
self.run_attribution = kwargs.get('run_attribution', None)
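
# Illustrative sketch (hand-written; not AutoRest output): per the docstring,
# control_type only accepts None or "IfElse". The ParameterAssignment for
# control_parameter is omitted for brevity; the id below is hypothetical.
def _example_graph_control_node():
    node = GraphControlNode(
        id="control_node_0",
        control_type="IfElse",
    )
    assert node.serialize()["controlType"] == "IfElse"
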
class GraphControlReferenceNode(msrest.serialization.Model):
"""GraphControlReferenceNode.
:ivar id:
:vartype id: str
:ivar name:
:vartype name: str
:ivar comment:
:vartype comment: str
:ivar control_flow_type: Possible values include: "None", "DoWhile", "ParallelFor".
:vartype control_flow_type: str or ~flow.models.ControlFlowType
:ivar reference_node_id:
:vartype reference_node_id: str
:ivar do_while_control_flow_info:
:vartype do_while_control_flow_info: ~flow.models.DoWhileControlFlowInfo
:ivar parallel_for_control_flow_info:
:vartype parallel_for_control_flow_info: ~flow.models.ParallelForControlFlowInfo
:ivar run_attribution:
:vartype run_attribution: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'comment': {'key': 'comment', 'type': 'str'},
'control_flow_type': {'key': 'controlFlowType', 'type': 'str'},
'reference_node_id': {'key': 'referenceNodeId', 'type': 'str'},
'do_while_control_flow_info': {'key': 'doWhileControlFlowInfo', 'type': 'DoWhileControlFlowInfo'},
'parallel_for_control_flow_info': {'key': 'parallelForControlFlowInfo', 'type': 'ParallelForControlFlowInfo'},
'run_attribution': {'key': 'runAttribution', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword name:
:paramtype name: str
:keyword comment:
:paramtype comment: str
:keyword control_flow_type: Possible values include: "None", "DoWhile", "ParallelFor".
:paramtype control_flow_type: str or ~flow.models.ControlFlowType
:keyword reference_node_id:
:paramtype reference_node_id: str
:keyword do_while_control_flow_info:
:paramtype do_while_control_flow_info: ~flow.models.DoWhileControlFlowInfo
:keyword parallel_for_control_flow_info:
:paramtype parallel_for_control_flow_info: ~flow.models.ParallelForControlFlowInfo
:keyword run_attribution:
:paramtype run_attribution: str
"""
super(GraphControlReferenceNode, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.name = kwargs.get('name', None)
self.comment = kwargs.get('comment', None)
self.control_flow_type = kwargs.get('control_flow_type', None)
self.reference_node_id = kwargs.get('reference_node_id', None)
self.do_while_control_flow_info = kwargs.get('do_while_control_flow_info', None)
self.parallel_for_control_flow_info = kwargs.get('parallel_for_control_flow_info', None)
self.run_attribution = kwargs.get('run_attribution', None)
class GraphDatasetNode(msrest.serialization.Model):
"""GraphDatasetNode.
:ivar id:
:vartype id: str
:ivar dataset_id:
:vartype dataset_id: str
:ivar data_path_parameter_name:
:vartype data_path_parameter_name: str
:ivar data_set_definition:
:vartype data_set_definition: ~flow.models.DataSetDefinition
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'dataset_id': {'key': 'datasetId', 'type': 'str'},
'data_path_parameter_name': {'key': 'dataPathParameterName', 'type': 'str'},
'data_set_definition': {'key': 'dataSetDefinition', 'type': 'DataSetDefinition'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword dataset_id:
:paramtype dataset_id: str
:keyword data_path_parameter_name:
:paramtype data_path_parameter_name: str
:keyword data_set_definition:
:paramtype data_set_definition: ~flow.models.DataSetDefinition
"""
super(GraphDatasetNode, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.dataset_id = kwargs.get('dataset_id', None)
self.data_path_parameter_name = kwargs.get('data_path_parameter_name', None)
self.data_set_definition = kwargs.get('data_set_definition', None)
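# Usage sketch (illustrative): a dataset node can point at a registered
# dataset by id, or carry an inline DataSetDefinition (omitted here; its shape
# is defined elsewhere in this module).
#
#   data_node = GraphDatasetNode(
#       id='dataset-node-1',
#       dataset_id='<registered-dataset-guid>',
#   )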
class GraphDraftEntity(msrest.serialization.Model):
"""GraphDraftEntity.
:ivar module_nodes:
:vartype module_nodes: list[~flow.models.GraphModuleNode]
:ivar dataset_nodes:
:vartype dataset_nodes: list[~flow.models.GraphDatasetNode]
:ivar sub_graph_nodes:
:vartype sub_graph_nodes: list[~flow.models.GraphReferenceNode]
:ivar control_reference_nodes:
:vartype control_reference_nodes: list[~flow.models.GraphControlReferenceNode]
:ivar control_nodes:
:vartype control_nodes: list[~flow.models.GraphControlNode]
:ivar edges:
:vartype edges: list[~flow.models.GraphEdge]
:ivar entity_interface:
:vartype entity_interface: ~flow.models.EntityInterface
:ivar graph_layout:
:vartype graph_layout: ~flow.models.GraphLayout
:ivar created_by:
:vartype created_by: ~flow.models.CreatedBy
:ivar last_updated_by:
:vartype last_updated_by: ~flow.models.CreatedBy
:ivar default_compute:
:vartype default_compute: ~flow.models.ComputeSetting
:ivar default_datastore:
:vartype default_datastore: ~flow.models.DatastoreSetting
:ivar default_cloud_priority:
:vartype default_cloud_priority: ~flow.models.CloudPrioritySetting
:ivar extended_properties: This is a dictionary.
:vartype extended_properties: dict[str, str]
:ivar parent_sub_graph_module_ids:
:vartype parent_sub_graph_module_ids: list[str]
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'module_nodes': {'key': 'moduleNodes', 'type': '[GraphModuleNode]'},
'dataset_nodes': {'key': 'datasetNodes', 'type': '[GraphDatasetNode]'},
'sub_graph_nodes': {'key': 'subGraphNodes', 'type': '[GraphReferenceNode]'},
'control_reference_nodes': {'key': 'controlReferenceNodes', 'type': '[GraphControlReferenceNode]'},
'control_nodes': {'key': 'controlNodes', 'type': '[GraphControlNode]'},
'edges': {'key': 'edges', 'type': '[GraphEdge]'},
'entity_interface': {'key': 'entityInterface', 'type': 'EntityInterface'},
'graph_layout': {'key': 'graphLayout', 'type': 'GraphLayout'},
'created_by': {'key': 'createdBy', 'type': 'CreatedBy'},
'last_updated_by': {'key': 'lastUpdatedBy', 'type': 'CreatedBy'},
'default_compute': {'key': 'defaultCompute', 'type': 'ComputeSetting'},
'default_datastore': {'key': 'defaultDatastore', 'type': 'DatastoreSetting'},
'default_cloud_priority': {'key': 'defaultCloudPriority', 'type': 'CloudPrioritySetting'},
'extended_properties': {'key': 'extendedProperties', 'type': '{str}'},
'parent_sub_graph_module_ids': {'key': 'parentSubGraphModuleIds', 'type': '[str]'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword module_nodes:
:paramtype module_nodes: list[~flow.models.GraphModuleNode]
:keyword dataset_nodes:
:paramtype dataset_nodes: list[~flow.models.GraphDatasetNode]
:keyword sub_graph_nodes:
:paramtype sub_graph_nodes: list[~flow.models.GraphReferenceNode]
:keyword control_reference_nodes:
:paramtype control_reference_nodes: list[~flow.models.GraphControlReferenceNode]
:keyword control_nodes:
:paramtype control_nodes: list[~flow.models.GraphControlNode]
:keyword edges:
:paramtype edges: list[~flow.models.GraphEdge]
:keyword entity_interface:
:paramtype entity_interface: ~flow.models.EntityInterface
:keyword graph_layout:
:paramtype graph_layout: ~flow.models.GraphLayout
:keyword created_by:
:paramtype created_by: ~flow.models.CreatedBy
:keyword last_updated_by:
:paramtype last_updated_by: ~flow.models.CreatedBy
:keyword default_compute:
:paramtype default_compute: ~flow.models.ComputeSetting
:keyword default_datastore:
:paramtype default_datastore: ~flow.models.DatastoreSetting
:keyword default_cloud_priority:
:paramtype default_cloud_priority: ~flow.models.CloudPrioritySetting
:keyword extended_properties: This is a dictionary.
:paramtype extended_properties: dict[str, str]
:keyword parent_sub_graph_module_ids:
:paramtype parent_sub_graph_module_ids: list[str]
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(GraphDraftEntity, self).__init__(**kwargs)
self.module_nodes = kwargs.get('module_nodes', None)
self.dataset_nodes = kwargs.get('dataset_nodes', None)
self.sub_graph_nodes = kwargs.get('sub_graph_nodes', None)
self.control_reference_nodes = kwargs.get('control_reference_nodes', None)
self.control_nodes = kwargs.get('control_nodes', None)
self.edges = kwargs.get('edges', None)
self.entity_interface = kwargs.get('entity_interface', None)
self.graph_layout = kwargs.get('graph_layout', None)
self.created_by = kwargs.get('created_by', None)
self.last_updated_by = kwargs.get('last_updated_by', None)
self.default_compute = kwargs.get('default_compute', None)
self.default_datastore = kwargs.get('default_datastore', None)
self.default_cloud_priority = kwargs.get('default_cloud_priority', None)
self.extended_properties = kwargs.get('extended_properties', None)
self.parent_sub_graph_module_ids = kwargs.get('parent_sub_graph_module_ids', None)
self.id = kwargs.get('id', None)
self.etag = kwargs.get('etag', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
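# Usage sketch (illustrative): a draft graph is the node lists plus the edges
# wiring them together; layout, defaults, and audit fields are optional. The
# nested values are instances of the other model classes in this module (see
# the sketches under GraphModuleNode and GraphEdge below).
#
#   draft = GraphDraftEntity(
#       module_nodes=[step_a, step_b],   # GraphModuleNode instances
#       dataset_nodes=[data_node],       # GraphDatasetNode instances
#       edges=[edge],                    # GraphEdge instances
#   )
#   payload = draft.serialize()  # msrest: dict keyed by the wire names
#                                # ('moduleNodes', 'datasetNodes', ...)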
class GraphEdge(msrest.serialization.Model):
"""GraphEdge.
:ivar source_output_port:
:vartype source_output_port: ~flow.models.PortInfo
:ivar destination_input_port:
:vartype destination_input_port: ~flow.models.PortInfo
"""
_attribute_map = {
'source_output_port': {'key': 'sourceOutputPort', 'type': 'PortInfo'},
'destination_input_port': {'key': 'destinationInputPort', 'type': 'PortInfo'},
}
def __init__(
self,
**kwargs
):
"""
:keyword source_output_port:
:paramtype source_output_port: ~flow.models.PortInfo
:keyword destination_input_port:
:paramtype destination_input_port: ~flow.models.PortInfo
"""
super(GraphEdge, self).__init__(**kwargs)
self.source_output_port = kwargs.get('source_output_port', None)
self.destination_input_port = kwargs.get('destination_input_port', None)
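# Usage sketch (illustrative): an edge connects an output port of one node to
# an input port of another. The PortInfo keyword names below are assumptions;
# see its definition in this module for the exact shape.
#
#   edge = GraphEdge(
#       source_output_port=PortInfo(node_id='dataset-node-1',
#                                   port_name='output'),
#       destination_input_port=PortInfo(node_id='module-node-1',
#                                       port_name='input_data'),
#   )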
class GraphLayout(msrest.serialization.Model):
"""GraphLayout.
:ivar node_layouts: This is a dictionary.
:vartype node_layouts: dict[str, ~flow.models.NodeLayout]
:ivar extended_data:
:vartype extended_data: str
:ivar annotation_nodes:
:vartype annotation_nodes: list[~flow.models.GraphAnnotationNode]
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'node_layouts': {'key': 'nodeLayouts', 'type': '{NodeLayout}'},
'extended_data': {'key': 'extendedData', 'type': 'str'},
'annotation_nodes': {'key': 'annotationNodes', 'type': '[GraphAnnotationNode]'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword node_layouts: This is a dictionary.
:paramtype node_layouts: dict[str, ~flow.models.NodeLayout]
:keyword extended_data:
:paramtype extended_data: str
:keyword annotation_nodes:
:paramtype annotation_nodes: list[~flow.models.GraphAnnotationNode]
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(GraphLayout, self).__init__(**kwargs)
self.node_layouts = kwargs.get('node_layouts', None)
self.extended_data = kwargs.get('extended_data', None)
self.annotation_nodes = kwargs.get('annotation_nodes', None)
self.id = kwargs.get('id', None)
self.etag = kwargs.get('etag', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
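# Usage sketch (illustrative): node_layouts maps node ids to NodeLayout
# objects (coordinates etc., defined elsewhere in this module), and
# annotation_nodes carries free-form notes.
#
#   layout = GraphLayout(
#       node_layouts={'module-node-1': node_layout},  # NodeLayout instance
#       annotation_nodes=[note],                      # GraphAnnotationNode
#   )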
class GraphLayoutCreationInfo(msrest.serialization.Model):
"""GraphLayoutCreationInfo.
:ivar node_layouts: This is a dictionary.
:vartype node_layouts: dict[str, ~flow.models.NodeLayout]
:ivar extended_data:
:vartype extended_data: str
:ivar annotation_nodes:
:vartype annotation_nodes: list[~flow.models.GraphAnnotationNode]
"""
_attribute_map = {
'node_layouts': {'key': 'nodeLayouts', 'type': '{NodeLayout}'},
'extended_data': {'key': 'extendedData', 'type': 'str'},
'annotation_nodes': {'key': 'annotationNodes', 'type': '[GraphAnnotationNode]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword node_layouts: This is a dictionary.
:paramtype node_layouts: dict[str, ~flow.models.NodeLayout]
:keyword extended_data:
:paramtype extended_data: str
:keyword annotation_nodes:
:paramtype annotation_nodes: list[~flow.models.GraphAnnotationNode]
"""
super(GraphLayoutCreationInfo, self).__init__(**kwargs)
self.node_layouts = kwargs.get('node_layouts', None)
self.extended_data = kwargs.get('extended_data', None)
self.annotation_nodes = kwargs.get('annotation_nodes', None)
class GraphModuleNode(msrest.serialization.Model):
"""GraphModuleNode.
:ivar module_type: Possible values include: "None", "BatchInferencing".
:vartype module_type: str or ~flow.models.ModuleType
:ivar runconfig:
:vartype runconfig: str
:ivar id:
:vartype id: str
:ivar module_id:
:vartype module_id: str
:ivar comment:
:vartype comment: str
:ivar name:
:vartype name: str
:ivar module_parameters:
:vartype module_parameters: list[~flow.models.ParameterAssignment]
:ivar module_metadata_parameters:
:vartype module_metadata_parameters: list[~flow.models.ParameterAssignment]
:ivar module_output_settings:
:vartype module_output_settings: list[~flow.models.OutputSetting]
:ivar module_input_settings:
:vartype module_input_settings: list[~flow.models.InputSetting]
:ivar use_graph_default_compute:
:vartype use_graph_default_compute: bool
:ivar use_graph_default_datastore:
:vartype use_graph_default_datastore: bool
:ivar regenerate_output:
:vartype regenerate_output: bool
:ivar control_inputs:
:vartype control_inputs: list[~flow.models.ControlInput]
:ivar cloud_settings:
:vartype cloud_settings: ~flow.models.CloudSettings
:ivar execution_phase: Possible values include: "Execution", "Initialization", "Finalization".
:vartype execution_phase: str or ~flow.models.ExecutionPhase
:ivar run_attribution:
:vartype run_attribution: str
"""
_attribute_map = {
'module_type': {'key': 'moduleType', 'type': 'str'},
'runconfig': {'key': 'runconfig', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'module_id': {'key': 'moduleId', 'type': 'str'},
'comment': {'key': 'comment', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'module_parameters': {'key': 'moduleParameters', 'type': '[ParameterAssignment]'},
'module_metadata_parameters': {'key': 'moduleMetadataParameters', 'type': '[ParameterAssignment]'},
'module_output_settings': {'key': 'moduleOutputSettings', 'type': '[OutputSetting]'},
'module_input_settings': {'key': 'moduleInputSettings', 'type': '[InputSetting]'},
'use_graph_default_compute': {'key': 'useGraphDefaultCompute', 'type': 'bool'},
'use_graph_default_datastore': {'key': 'useGraphDefaultDatastore', 'type': 'bool'},
'regenerate_output': {'key': 'regenerateOutput', 'type': 'bool'},
'control_inputs': {'key': 'controlInputs', 'type': '[ControlInput]'},
'cloud_settings': {'key': 'cloudSettings', 'type': 'CloudSettings'},
'execution_phase': {'key': 'executionPhase', 'type': 'str'},
'run_attribution': {'key': 'runAttribution', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword module_type: Possible values include: "None", "BatchInferencing".
:paramtype module_type: str or ~flow.models.ModuleType
:keyword runconfig:
:paramtype runconfig: str
:keyword id:
:paramtype id: str
:keyword module_id:
:paramtype module_id: str
:keyword comment:
:paramtype comment: str
:keyword name:
:paramtype name: str
:keyword module_parameters:
:paramtype module_parameters: list[~flow.models.ParameterAssignment]
:keyword module_metadata_parameters:
:paramtype module_metadata_parameters: list[~flow.models.ParameterAssignment]
:keyword module_output_settings:
:paramtype module_output_settings: list[~flow.models.OutputSetting]
:keyword module_input_settings:
:paramtype module_input_settings: list[~flow.models.InputSetting]
:keyword use_graph_default_compute:
:paramtype use_graph_default_compute: bool
:keyword use_graph_default_datastore:
:paramtype use_graph_default_datastore: bool
:keyword regenerate_output:
:paramtype regenerate_output: bool
:keyword control_inputs:
:paramtype control_inputs: list[~flow.models.ControlInput]
:keyword cloud_settings:
:paramtype cloud_settings: ~flow.models.CloudSettings
:keyword execution_phase: Possible values include: "Execution", "Initialization",
"Finalization".
:paramtype execution_phase: str or ~flow.models.ExecutionPhase
:keyword run_attribution:
:paramtype run_attribution: str
"""
super(GraphModuleNode, self).__init__(**kwargs)
self.module_type = kwargs.get('module_type', None)
self.runconfig = kwargs.get('runconfig', None)
self.id = kwargs.get('id', None)
self.module_id = kwargs.get('module_id', None)
self.comment = kwargs.get('comment', None)
self.name = kwargs.get('name', None)
self.module_parameters = kwargs.get('module_parameters', None)
self.module_metadata_parameters = kwargs.get('module_metadata_parameters', None)
self.module_output_settings = kwargs.get('module_output_settings', None)
self.module_input_settings = kwargs.get('module_input_settings', None)
self.use_graph_default_compute = kwargs.get('use_graph_default_compute', None)
self.use_graph_default_datastore = kwargs.get('use_graph_default_datastore', None)
self.regenerate_output = kwargs.get('regenerate_output', None)
self.control_inputs = kwargs.get('control_inputs', None)
self.cloud_settings = kwargs.get('cloud_settings', None)
self.execution_phase = kwargs.get('execution_phase', None)
self.run_attribution = kwargs.get('run_attribution', None)
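# Usage sketch (illustrative): a module node binds a reusable module (by
# module_id) into the graph, optionally overriding parameters and falling
# back to the graph-level compute/datastore defaults. The ParameterAssignment
# kwargs are assumptions; see its definition in this module.
#
#   step_a = GraphModuleNode(
#       id='module-node-1',
#       module_id='<registered-module-guid>',
#       name='clean_data',
#       module_parameters=[ParameterAssignment(name='threshold', value='0.5')],
#       use_graph_default_compute=True,
#       use_graph_default_datastore=True,
#   )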
class GraphModuleNodeRunSetting(msrest.serialization.Model):
"""GraphModuleNodeRunSetting.
:ivar node_id:
:vartype node_id: str
:ivar module_id:
:vartype module_id: str
:ivar step_type:
:vartype step_type: str
:ivar run_settings:
:vartype run_settings: list[~flow.models.RunSettingParameterAssignment]
"""
_attribute_map = {
'node_id': {'key': 'nodeId', 'type': 'str'},
'module_id': {'key': 'moduleId', 'type': 'str'},
'step_type': {'key': 'stepType', 'type': 'str'},
'run_settings': {'key': 'runSettings', 'type': '[RunSettingParameterAssignment]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword node_id:
:paramtype node_id: str
:keyword module_id:
:paramtype module_id: str
:keyword step_type:
:paramtype step_type: str
:keyword run_settings:
:paramtype run_settings: list[~flow.models.RunSettingParameterAssignment]
"""
super(GraphModuleNodeRunSetting, self).__init__(**kwargs)
self.node_id = kwargs.get('node_id', None)
self.module_id = kwargs.get('module_id', None)
self.step_type = kwargs.get('step_type', None)
self.run_settings = kwargs.get('run_settings', None)
class GraphModuleNodeUIInputSetting(msrest.serialization.Model):
"""GraphModuleNodeUIInputSetting.
:ivar node_id:
:vartype node_id: str
:ivar module_id:
:vartype module_id: str
:ivar module_input_settings:
:vartype module_input_settings: list[~flow.models.UIInputSetting]
"""
_attribute_map = {
'node_id': {'key': 'nodeId', 'type': 'str'},
'module_id': {'key': 'moduleId', 'type': 'str'},
'module_input_settings': {'key': 'moduleInputSettings', 'type': '[UIInputSetting]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword node_id:
:paramtype node_id: str
:keyword module_id:
:paramtype module_id: str
:keyword module_input_settings:
:paramtype module_input_settings: list[~flow.models.UIInputSetting]
"""
super(GraphModuleNodeUIInputSetting, self).__init__(**kwargs)
self.node_id = kwargs.get('node_id', None)
self.module_id = kwargs.get('module_id', None)
self.module_input_settings = kwargs.get('module_input_settings', None)
class GraphNodeStatusInfo(msrest.serialization.Model):
"""GraphNodeStatusInfo.
:ivar status: Possible values include: "NotStarted", "Queued", "Running", "Failed", "Finished",
"Canceled", "PartiallyExecuted", "Bypassed".
:vartype status: str or ~flow.models.TaskStatusCode
:ivar run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:vartype run_status: str or ~flow.models.RunStatus
:ivar is_bypassed:
:vartype is_bypassed: bool
:ivar has_failed_child_run:
:vartype has_failed_child_run: bool
:ivar partially_executed:
:vartype partially_executed: bool
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar aether_start_time:
:vartype aether_start_time: ~datetime.datetime
:ivar aether_end_time:
:vartype aether_end_time: ~datetime.datetime
:ivar aether_creation_time:
:vartype aether_creation_time: ~datetime.datetime
:ivar run_history_start_time:
:vartype run_history_start_time: ~datetime.datetime
:ivar run_history_end_time:
:vartype run_history_end_time: ~datetime.datetime
:ivar run_history_creation_time:
:vartype run_history_creation_time: ~datetime.datetime
:ivar reuse_info:
:vartype reuse_info: ~flow.models.TaskReuseInfo
:ivar control_flow_info:
:vartype control_flow_info: ~flow.models.TaskControlFlowInfo
:ivar status_code: Possible values include: "NotStarted", "Queued", "Running", "Failed",
"Finished", "Canceled", "PartiallyExecuted", "Bypassed".
:vartype status_code: str or ~flow.models.TaskStatusCode
:ivar status_detail:
:vartype status_detail: str
:ivar creation_time:
:vartype creation_time: ~datetime.datetime
:ivar schedule_time:
:vartype schedule_time: ~datetime.datetime
:ivar start_time:
:vartype start_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
:ivar request_id:
:vartype request_id: str
:ivar run_id:
:vartype run_id: str
:ivar data_container_id:
:vartype data_container_id: str
:ivar real_time_log_path:
:vartype real_time_log_path: str
:ivar has_warnings:
:vartype has_warnings: bool
:ivar composite_node_id:
:vartype composite_node_id: str
"""
_attribute_map = {
'status': {'key': 'status', 'type': 'str'},
'run_status': {'key': 'runStatus', 'type': 'str'},
'is_bypassed': {'key': 'isBypassed', 'type': 'bool'},
'has_failed_child_run': {'key': 'hasFailedChildRun', 'type': 'bool'},
'partially_executed': {'key': 'partiallyExecuted', 'type': 'bool'},
'properties': {'key': 'properties', 'type': '{str}'},
'aether_start_time': {'key': 'aetherStartTime', 'type': 'iso-8601'},
'aether_end_time': {'key': 'aetherEndTime', 'type': 'iso-8601'},
'aether_creation_time': {'key': 'aetherCreationTime', 'type': 'iso-8601'},
'run_history_start_time': {'key': 'runHistoryStartTime', 'type': 'iso-8601'},
'run_history_end_time': {'key': 'runHistoryEndTime', 'type': 'iso-8601'},
'run_history_creation_time': {'key': 'runHistoryCreationTime', 'type': 'iso-8601'},
'reuse_info': {'key': 'reuseInfo', 'type': 'TaskReuseInfo'},
'control_flow_info': {'key': 'controlFlowInfo', 'type': 'TaskControlFlowInfo'},
'status_code': {'key': 'statusCode', 'type': 'str'},
'status_detail': {'key': 'statusDetail', 'type': 'str'},
'creation_time': {'key': 'creationTime', 'type': 'iso-8601'},
'schedule_time': {'key': 'scheduleTime', 'type': 'iso-8601'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'request_id': {'key': 'requestId', 'type': 'str'},
'run_id': {'key': 'runId', 'type': 'str'},
'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
'real_time_log_path': {'key': 'realTimeLogPath', 'type': 'str'},
'has_warnings': {'key': 'hasWarnings', 'type': 'bool'},
'composite_node_id': {'key': 'compositeNodeId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword status: Possible values include: "NotStarted", "Queued", "Running", "Failed",
"Finished", "Canceled", "PartiallyExecuted", "Bypassed".
:paramtype status: str or ~flow.models.TaskStatusCode
:keyword run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:paramtype run_status: str or ~flow.models.RunStatus
:keyword is_bypassed:
:paramtype is_bypassed: bool
:keyword has_failed_child_run:
:paramtype has_failed_child_run: bool
:keyword partially_executed:
:paramtype partially_executed: bool
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword aether_start_time:
:paramtype aether_start_time: ~datetime.datetime
:keyword aether_end_time:
:paramtype aether_end_time: ~datetime.datetime
:keyword aether_creation_time:
:paramtype aether_creation_time: ~datetime.datetime
:keyword run_history_start_time:
:paramtype run_history_start_time: ~datetime.datetime
:keyword run_history_end_time:
:paramtype run_history_end_time: ~datetime.datetime
:keyword run_history_creation_time:
:paramtype run_history_creation_time: ~datetime.datetime
:keyword reuse_info:
:paramtype reuse_info: ~flow.models.TaskReuseInfo
:keyword control_flow_info:
:paramtype control_flow_info: ~flow.models.TaskControlFlowInfo
:keyword status_code: Possible values include: "NotStarted", "Queued", "Running", "Failed",
"Finished", "Canceled", "PartiallyExecuted", "Bypassed".
:paramtype status_code: str or ~flow.models.TaskStatusCode
:keyword status_detail:
:paramtype status_detail: str
:keyword creation_time:
:paramtype creation_time: ~datetime.datetime
:keyword schedule_time:
:paramtype schedule_time: ~datetime.datetime
:keyword start_time:
:paramtype start_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
:keyword request_id:
:paramtype request_id: str
:keyword run_id:
:paramtype run_id: str
:keyword data_container_id:
:paramtype data_container_id: str
:keyword real_time_log_path:
:paramtype real_time_log_path: str
:keyword has_warnings:
:paramtype has_warnings: bool
:keyword composite_node_id:
:paramtype composite_node_id: str
"""
super(GraphNodeStatusInfo, self).__init__(**kwargs)
self.status = kwargs.get('status', None)
self.run_status = kwargs.get('run_status', None)
self.is_bypassed = kwargs.get('is_bypassed', None)
self.has_failed_child_run = kwargs.get('has_failed_child_run', None)
self.partially_executed = kwargs.get('partially_executed', None)
self.properties = kwargs.get('properties', None)
self.aether_start_time = kwargs.get('aether_start_time', None)
self.aether_end_time = kwargs.get('aether_end_time', None)
self.aether_creation_time = kwargs.get('aether_creation_time', None)
self.run_history_start_time = kwargs.get('run_history_start_time', None)
self.run_history_end_time = kwargs.get('run_history_end_time', None)
self.run_history_creation_time = kwargs.get('run_history_creation_time', None)
self.reuse_info = kwargs.get('reuse_info', None)
self.control_flow_info = kwargs.get('control_flow_info', None)
self.status_code = kwargs.get('status_code', None)
self.status_detail = kwargs.get('status_detail', None)
self.creation_time = kwargs.get('creation_time', None)
self.schedule_time = kwargs.get('schedule_time', None)
self.start_time = kwargs.get('start_time', None)
self.end_time = kwargs.get('end_time', None)
self.request_id = kwargs.get('request_id', None)
self.run_id = kwargs.get('run_id', None)
self.data_container_id = kwargs.get('data_container_id', None)
self.real_time_log_path = kwargs.get('real_time_log_path', None)
self.has_warnings = kwargs.get('has_warnings', None)
self.composite_node_id = kwargs.get('composite_node_id', None)
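# Usage sketch (illustrative): status objects usually arrive from the
# service, so the natural entry point is msrest deserialization from a dict
# keyed by the wire names in _attribute_map.
#
#   raw = {'status': 'Running', 'runStatus': 'Running', 'runId': 'run-123'}
#   info = GraphNodeStatusInfo.deserialize(raw)
#   assert info.run_id == 'run-123'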
class GraphReferenceNode(msrest.serialization.Model):
"""GraphReferenceNode.
:ivar graph_id:
:vartype graph_id: str
:ivar default_compute:
:vartype default_compute: ~flow.models.ComputeSetting
:ivar default_datastore:
:vartype default_datastore: ~flow.models.DatastoreSetting
:ivar id:
:vartype id: str
:ivar module_id:
:vartype module_id: str
:ivar comment:
:vartype comment: str
:ivar name:
:vartype name: str
:ivar module_parameters:
:vartype module_parameters: list[~flow.models.ParameterAssignment]
:ivar module_metadata_parameters:
:vartype module_metadata_parameters: list[~flow.models.ParameterAssignment]
:ivar module_output_settings:
:vartype module_output_settings: list[~flow.models.OutputSetting]
:ivar module_input_settings:
:vartype module_input_settings: list[~flow.models.InputSetting]
:ivar use_graph_default_compute:
:vartype use_graph_default_compute: bool
:ivar use_graph_default_datastore:
:vartype use_graph_default_datastore: bool
:ivar regenerate_output:
:vartype regenerate_output: bool
:ivar control_inputs:
:vartype control_inputs: list[~flow.models.ControlInput]
:ivar cloud_settings:
:vartype cloud_settings: ~flow.models.CloudSettings
:ivar execution_phase: Possible values include: "Execution", "Initialization", "Finalization".
:vartype execution_phase: str or ~flow.models.ExecutionPhase
:ivar run_attribution:
:vartype run_attribution: str
"""
_attribute_map = {
'graph_id': {'key': 'graphId', 'type': 'str'},
'default_compute': {'key': 'defaultCompute', 'type': 'ComputeSetting'},
'default_datastore': {'key': 'defaultDatastore', 'type': 'DatastoreSetting'},
'id': {'key': 'id', 'type': 'str'},
'module_id': {'key': 'moduleId', 'type': 'str'},
'comment': {'key': 'comment', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'module_parameters': {'key': 'moduleParameters', 'type': '[ParameterAssignment]'},
'module_metadata_parameters': {'key': 'moduleMetadataParameters', 'type': '[ParameterAssignment]'},
'module_output_settings': {'key': 'moduleOutputSettings', 'type': '[OutputSetting]'},
'module_input_settings': {'key': 'moduleInputSettings', 'type': '[InputSetting]'},
'use_graph_default_compute': {'key': 'useGraphDefaultCompute', 'type': 'bool'},
'use_graph_default_datastore': {'key': 'useGraphDefaultDatastore', 'type': 'bool'},
'regenerate_output': {'key': 'regenerateOutput', 'type': 'bool'},
'control_inputs': {'key': 'controlInputs', 'type': '[ControlInput]'},
'cloud_settings': {'key': 'cloudSettings', 'type': 'CloudSettings'},
'execution_phase': {'key': 'executionPhase', 'type': 'str'},
'run_attribution': {'key': 'runAttribution', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword graph_id:
:paramtype graph_id: str
:keyword default_compute:
:paramtype default_compute: ~flow.models.ComputeSetting
:keyword default_datastore:
:paramtype default_datastore: ~flow.models.DatastoreSetting
:keyword id:
:paramtype id: str
:keyword module_id:
:paramtype module_id: str
:keyword comment:
:paramtype comment: str
:keyword name:
:paramtype name: str
:keyword module_parameters:
:paramtype module_parameters: list[~flow.models.ParameterAssignment]
:keyword module_metadata_parameters:
:paramtype module_metadata_parameters: list[~flow.models.ParameterAssignment]
:keyword module_output_settings:
:paramtype module_output_settings: list[~flow.models.OutputSetting]
:keyword module_input_settings:
:paramtype module_input_settings: list[~flow.models.InputSetting]
:keyword use_graph_default_compute:
:paramtype use_graph_default_compute: bool
:keyword use_graph_default_datastore:
:paramtype use_graph_default_datastore: bool
:keyword regenerate_output:
:paramtype regenerate_output: bool
:keyword control_inputs:
:paramtype control_inputs: list[~flow.models.ControlInput]
:keyword cloud_settings:
:paramtype cloud_settings: ~flow.models.CloudSettings
:keyword execution_phase: Possible values include: "Execution", "Initialization",
"Finalization".
:paramtype execution_phase: str or ~flow.models.ExecutionPhase
:keyword run_attribution:
:paramtype run_attribution: str
"""
super(GraphReferenceNode, self).__init__(**kwargs)
self.graph_id = kwargs.get('graph_id', None)
self.default_compute = kwargs.get('default_compute', None)
self.default_datastore = kwargs.get('default_datastore', None)
self.id = kwargs.get('id', None)
self.module_id = kwargs.get('module_id', None)
self.comment = kwargs.get('comment', None)
self.name = kwargs.get('name', None)
self.module_parameters = kwargs.get('module_parameters', None)
self.module_metadata_parameters = kwargs.get('module_metadata_parameters', None)
self.module_output_settings = kwargs.get('module_output_settings', None)
self.module_input_settings = kwargs.get('module_input_settings', None)
self.use_graph_default_compute = kwargs.get('use_graph_default_compute', None)
self.use_graph_default_datastore = kwargs.get('use_graph_default_datastore', None)
self.regenerate_output = kwargs.get('regenerate_output', None)
self.control_inputs = kwargs.get('control_inputs', None)
self.cloud_settings = kwargs.get('cloud_settings', None)
self.execution_phase = kwargs.get('execution_phase', None)
self.run_attribution = kwargs.get('run_attribution', None)
class HdfsReference(msrest.serialization.Model):
"""HdfsReference.
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
:ivar relative_path:
:vartype relative_path: str
"""
_attribute_map = {
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
:keyword relative_path:
:paramtype relative_path: str
"""
super(HdfsReference, self).__init__(**kwargs)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
self.relative_path = kwargs.get('relative_path', None)
class HdiClusterComputeInfo(msrest.serialization.Model):
"""HdiClusterComputeInfo.
:ivar address:
:vartype address: str
:ivar username:
:vartype username: str
:ivar password:
:vartype password: str
:ivar private_key:
:vartype private_key: str
"""
_attribute_map = {
'address': {'key': 'address', 'type': 'str'},
'username': {'key': 'username', 'type': 'str'},
'password': {'key': 'password', 'type': 'str'},
'private_key': {'key': 'privateKey', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword address:
:paramtype address: str
:keyword username:
:paramtype username: str
:keyword password:
:paramtype password: str
:keyword private_key:
:paramtype private_key: str
"""
super(HdiClusterComputeInfo, self).__init__(**kwargs)
self.address = kwargs.get('address', None)
self.username = kwargs.get('username', None)
self.password = kwargs.get('password', None)
self.private_key = kwargs.get('private_key', None)
class HdiConfiguration(msrest.serialization.Model):
"""HdiConfiguration.
:ivar yarn_deploy_mode: Possible values include: "None", "Client", "Cluster".
:vartype yarn_deploy_mode: str or ~flow.models.YarnDeployMode
"""
_attribute_map = {
'yarn_deploy_mode': {'key': 'yarnDeployMode', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword yarn_deploy_mode: Possible values include: "None", "Client", "Cluster".
:paramtype yarn_deploy_mode: str or ~flow.models.YarnDeployMode
"""
super(HdiConfiguration, self).__init__(**kwargs)
self.yarn_deploy_mode = kwargs.get('yarn_deploy_mode', None)
class HdiRunConfiguration(msrest.serialization.Model):
"""HdiRunConfiguration.
:ivar file:
:vartype file: str
:ivar class_name:
:vartype class_name: str
:ivar files:
:vartype files: list[str]
:ivar archives:
:vartype archives: list[str]
:ivar jars:
:vartype jars: list[str]
:ivar py_files:
:vartype py_files: list[str]
:ivar compute_name:
:vartype compute_name: str
:ivar queue:
:vartype queue: str
:ivar driver_memory:
:vartype driver_memory: str
:ivar driver_cores:
:vartype driver_cores: int
:ivar executor_memory:
:vartype executor_memory: str
:ivar executor_cores:
:vartype executor_cores: int
:ivar number_executors:
:vartype number_executors: int
:ivar conf: Dictionary of :code:`<string>`.
:vartype conf: dict[str, str]
:ivar name:
:vartype name: str
"""
_attribute_map = {
'file': {'key': 'file', 'type': 'str'},
'class_name': {'key': 'className', 'type': 'str'},
'files': {'key': 'files', 'type': '[str]'},
'archives': {'key': 'archives', 'type': '[str]'},
'jars': {'key': 'jars', 'type': '[str]'},
'py_files': {'key': 'pyFiles', 'type': '[str]'},
'compute_name': {'key': 'computeName', 'type': 'str'},
'queue': {'key': 'queue', 'type': 'str'},
'driver_memory': {'key': 'driverMemory', 'type': 'str'},
'driver_cores': {'key': 'driverCores', 'type': 'int'},
'executor_memory': {'key': 'executorMemory', 'type': 'str'},
'executor_cores': {'key': 'executorCores', 'type': 'int'},
'number_executors': {'key': 'numberExecutors', 'type': 'int'},
'conf': {'key': 'conf', 'type': '{str}'},
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword file:
:paramtype file: str
:keyword class_name:
:paramtype class_name: str
:keyword files:
:paramtype files: list[str]
:keyword archives:
:paramtype archives: list[str]
:keyword jars:
:paramtype jars: list[str]
:keyword py_files:
:paramtype py_files: list[str]
:keyword compute_name:
:paramtype compute_name: str
:keyword queue:
:paramtype queue: str
:keyword driver_memory:
:paramtype driver_memory: str
:keyword driver_cores:
:paramtype driver_cores: int
:keyword executor_memory:
:paramtype executor_memory: str
:keyword executor_cores:
:paramtype executor_cores: int
:keyword number_executors:
:paramtype number_executors: int
:keyword conf: Dictionary of :code:`<string>`.
:paramtype conf: dict[str, str]
:keyword name:
:paramtype name: str
"""
super(HdiRunConfiguration, self).__init__(**kwargs)
self.file = kwargs.get('file', None)
self.class_name = kwargs.get('class_name', None)
self.files = kwargs.get('files', None)
self.archives = kwargs.get('archives', None)
self.jars = kwargs.get('jars', None)
self.py_files = kwargs.get('py_files', None)
self.compute_name = kwargs.get('compute_name', None)
self.queue = kwargs.get('queue', None)
self.driver_memory = kwargs.get('driver_memory', None)
self.driver_cores = kwargs.get('driver_cores', None)
self.executor_memory = kwargs.get('executor_memory', None)
self.executor_cores = kwargs.get('executor_cores', None)
self.number_executors = kwargs.get('number_executors', None)
self.conf = kwargs.get('conf', None)
self.name = kwargs.get('name', None)
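# Usage sketch (illustrative): this mirrors a Spark submit against an HDI
# cluster; the memory strings use Spark's "<n>g" notation, and conf carries
# raw Spark/YARN properties.
#
#   hdi_run = HdiRunConfiguration(
#       file='train.py',
#       py_files=['utils.zip'],
#       compute_name='my-hdi-cluster',
#       driver_memory='4g',
#       driver_cores=2,
#       executor_memory='8g',
#       executor_cores=4,
#       number_executors=10,
#       conf={'spark.yarn.maxAppAttempts': '1'},
#   )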
class HistoryConfiguration(msrest.serialization.Model):
"""HistoryConfiguration.
    :ivar output_collection: The default value is True.
    :vartype output_collection: bool
    :ivar directories_to_watch: The default value is ['logs'].
    :vartype directories_to_watch: list[str]
    :ivar enable_m_lflow_tracking: The default value is True.
    :vartype enable_m_lflow_tracking: bool
"""
_attribute_map = {
'output_collection': {'key': 'outputCollection', 'type': 'bool'},
'directories_to_watch': {'key': 'directoriesToWatch', 'type': '[str]'},
'enable_m_lflow_tracking': {'key': 'enableMLflowTracking', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
        :keyword output_collection: The default value is True.
        :paramtype output_collection: bool
        :keyword directories_to_watch: The default value is ['logs'].
        :paramtype directories_to_watch: list[str]
        :keyword enable_m_lflow_tracking: The default value is True.
        :paramtype enable_m_lflow_tracking: bool
"""
super(HistoryConfiguration, self).__init__(**kwargs)
self.output_collection = kwargs.get('output_collection', True)
self.directories_to_watch = kwargs.get('directories_to_watch', ['logs'])
self.enable_m_lflow_tracking = kwargs.get('enable_m_lflow_tracking', True)
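# Note that, unlike most models in this module, HistoryConfiguration applies
# non-None defaults, so an empty construction is already meaningful:
#
#   history = HistoryConfiguration()
#   assert history.output_collection is True
#   assert history.directories_to_watch == ['logs']
#   assert history.enable_m_lflow_tracking is True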
class HyperDriveConfiguration(msrest.serialization.Model):
"""HyperDriveConfiguration.
:ivar hyper_drive_run_config:
:vartype hyper_drive_run_config: str
:ivar primary_metric_goal:
:vartype primary_metric_goal: str
:ivar primary_metric_name:
:vartype primary_metric_name: str
:ivar arguments:
:vartype arguments: list[~flow.models.ArgumentAssignment]
"""
_attribute_map = {
'hyper_drive_run_config': {'key': 'hyperDriveRunConfig', 'type': 'str'},
'primary_metric_goal': {'key': 'primaryMetricGoal', 'type': 'str'},
'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'},
'arguments': {'key': 'arguments', 'type': '[ArgumentAssignment]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword hyper_drive_run_config:
:paramtype hyper_drive_run_config: str
:keyword primary_metric_goal:
:paramtype primary_metric_goal: str
:keyword primary_metric_name:
:paramtype primary_metric_name: str
:keyword arguments:
:paramtype arguments: list[~flow.models.ArgumentAssignment]
"""
super(HyperDriveConfiguration, self).__init__(**kwargs)
self.hyper_drive_run_config = kwargs.get('hyper_drive_run_config', None)
self.primary_metric_goal = kwargs.get('primary_metric_goal', None)
self.primary_metric_name = kwargs.get('primary_metric_name', None)
self.arguments = kwargs.get('arguments', None)
class ICheckableLongRunningOperationResponse(msrest.serialization.Model):
"""ICheckableLongRunningOperationResponse.
:ivar completion_result: Any object.
:vartype completion_result: any
:ivar location:
:vartype location: str
:ivar operation_result:
:vartype operation_result: str
"""
_attribute_map = {
'completion_result': {'key': 'completionResult', 'type': 'object'},
'location': {'key': 'location', 'type': 'str'},
'operation_result': {'key': 'operationResult', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword completion_result: Any object.
:paramtype completion_result: any
:keyword location:
:paramtype location: str
:keyword operation_result:
:paramtype operation_result: str
"""
super(ICheckableLongRunningOperationResponse, self).__init__(**kwargs)
self.completion_result = kwargs.get('completion_result', None)
self.location = kwargs.get('location', None)
self.operation_result = kwargs.get('operation_result', None)
class IdentityConfiguration(msrest.serialization.Model):
"""IdentityConfiguration.
:ivar type: Possible values include: "Managed", "ServicePrincipal", "AMLToken".
:vartype type: str or ~flow.models.IdentityType
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar secret:
:vartype secret: str
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'secret': {'key': 'secret', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: Possible values include: "Managed", "ServicePrincipal", "AMLToken".
:paramtype type: str or ~flow.models.IdentityType
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword secret:
:paramtype secret: str
"""
super(IdentityConfiguration, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.properties = kwargs.get('properties', None)
self.secret = kwargs.get('secret', None)
class IdentitySetting(msrest.serialization.Model):
"""IdentitySetting.
:ivar type: Possible values include: "UserIdentity", "Managed", "AMLToken".
:vartype type: str or ~flow.models.AEVAIdentityType
:ivar client_id:
:vartype client_id: str
:ivar object_id:
:vartype object_id: str
:ivar msi_resource_id:
:vartype msi_resource_id: str
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'client_id': {'key': 'clientId', 'type': 'str'},
'object_id': {'key': 'objectId', 'type': 'str'},
'msi_resource_id': {'key': 'msiResourceId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: Possible values include: "UserIdentity", "Managed", "AMLToken".
:paramtype type: str or ~flow.models.AEVAIdentityType
:keyword client_id:
:paramtype client_id: str
:keyword object_id:
:paramtype object_id: str
:keyword msi_resource_id:
:paramtype msi_resource_id: str
"""
super(IdentitySetting, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.client_id = kwargs.get('client_id', None)
self.object_id = kwargs.get('object_id', None)
self.msi_resource_id = kwargs.get('msi_resource_id', None)
class ImportDataTask(msrest.serialization.Model):
"""ImportDataTask.
:ivar data_transfer_source:
:vartype data_transfer_source: ~flow.models.DataTransferSource
"""
_attribute_map = {
'data_transfer_source': {'key': 'DataTransferSource', 'type': 'DataTransferSource'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_transfer_source:
:paramtype data_transfer_source: ~flow.models.DataTransferSource
"""
super(ImportDataTask, self).__init__(**kwargs)
self.data_transfer_source = kwargs.get('data_transfer_source', None)
class IndexedErrorResponse(msrest.serialization.Model):
"""IndexedErrorResponse.
:ivar code:
:vartype code: str
:ivar error_code_hierarchy:
:vartype error_code_hierarchy: str
:ivar message:
:vartype message: str
:ivar time:
:vartype time: ~datetime.datetime
:ivar component_name:
:vartype component_name: str
:ivar severity:
:vartype severity: int
:ivar details_uri:
:vartype details_uri: str
:ivar reference_code:
:vartype reference_code: str
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'error_code_hierarchy': {'key': 'errorCodeHierarchy', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'time': {'key': 'time', 'type': 'iso-8601'},
'component_name': {'key': 'componentName', 'type': 'str'},
'severity': {'key': 'severity', 'type': 'int'},
'details_uri': {'key': 'detailsUri', 'type': 'str'},
'reference_code': {'key': 'referenceCode', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword code:
:paramtype code: str
:keyword error_code_hierarchy:
:paramtype error_code_hierarchy: str
:keyword message:
:paramtype message: str
:keyword time:
:paramtype time: ~datetime.datetime
:keyword component_name:
:paramtype component_name: str
:keyword severity:
:paramtype severity: int
:keyword details_uri:
:paramtype details_uri: str
:keyword reference_code:
:paramtype reference_code: str
"""
super(IndexedErrorResponse, self).__init__(**kwargs)
self.code = kwargs.get('code', None)
self.error_code_hierarchy = kwargs.get('error_code_hierarchy', None)
self.message = kwargs.get('message', None)
self.time = kwargs.get('time', None)
self.component_name = kwargs.get('component_name', None)
self.severity = kwargs.get('severity', None)
self.details_uri = kwargs.get('details_uri', None)
self.reference_code = kwargs.get('reference_code', None)
class InitScriptInfoDto(msrest.serialization.Model):
"""InitScriptInfoDto.
:ivar dbfs:
:vartype dbfs: ~flow.models.DbfsStorageInfoDto
"""
_attribute_map = {
'dbfs': {'key': 'dbfs', 'type': 'DbfsStorageInfoDto'},
}
def __init__(
self,
**kwargs
):
"""
:keyword dbfs:
:paramtype dbfs: ~flow.models.DbfsStorageInfoDto
"""
super(InitScriptInfoDto, self).__init__(**kwargs)
self.dbfs = kwargs.get('dbfs', None)
class InnerErrorDetails(msrest.serialization.Model):
"""InnerErrorDetails.
:ivar code:
:vartype code: str
:ivar message:
:vartype message: str
:ivar target:
:vartype target: str
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword code:
:paramtype code: str
:keyword message:
:paramtype message: str
:keyword target:
:paramtype target: str
"""
super(InnerErrorDetails, self).__init__(**kwargs)
self.code = kwargs.get('code', None)
self.message = kwargs.get('message', None)
self.target = kwargs.get('target', None)
class InnerErrorResponse(msrest.serialization.Model):
"""A nested structure of errors.
:ivar code: The error code.
:vartype code: str
:ivar inner_error: A nested structure of errors.
:vartype inner_error: ~flow.models.InnerErrorResponse
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'inner_error': {'key': 'innerError', 'type': 'InnerErrorResponse'},
}
def __init__(
self,
**kwargs
):
"""
:keyword code: The error code.
:paramtype code: str
:keyword inner_error: A nested structure of errors.
:paramtype inner_error: ~flow.models.InnerErrorResponse
"""
super(InnerErrorResponse, self).__init__(**kwargs)
self.code = kwargs.get('code', None)
self.inner_error = kwargs.get('inner_error', None)
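# Usage sketch (illustrative): because inner_error is self-referential, the
# most specific error code is found by walking to the end of the chain.
#
#   def most_specific_code(error):
#       code = None
#       while error is not None:
#           code = error.code or code
#           error = error.inner_error
#       return code
#
#   nested = InnerErrorResponse(
#       code='UserError',
#       inner_error=InnerErrorResponse(code='ValidationError'),
#   )
#   assert most_specific_code(nested) == 'ValidationError'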
class InputAsset(msrest.serialization.Model):
"""InputAsset.
:ivar asset:
:vartype asset: ~flow.models.Asset
:ivar mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs".
:vartype mechanism: str or ~flow.models.DeliveryMechanism
:ivar environment_variable_name:
:vartype environment_variable_name: str
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar overwrite:
:vartype overwrite: bool
:ivar options: Dictionary of :code:`<string>`.
:vartype options: dict[str, str]
"""
_attribute_map = {
'asset': {'key': 'asset', 'type': 'Asset'},
'mechanism': {'key': 'mechanism', 'type': 'str'},
'environment_variable_name': {'key': 'environmentVariableName', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'overwrite': {'key': 'overwrite', 'type': 'bool'},
'options': {'key': 'options', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword asset:
:paramtype asset: ~flow.models.Asset
:keyword mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs".
:paramtype mechanism: str or ~flow.models.DeliveryMechanism
:keyword environment_variable_name:
:paramtype environment_variable_name: str
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword overwrite:
:paramtype overwrite: bool
:keyword options: Dictionary of :code:`<string>`.
:paramtype options: dict[str, str]
"""
super(InputAsset, self).__init__(**kwargs)
self.asset = kwargs.get('asset', None)
self.mechanism = kwargs.get('mechanism', None)
self.environment_variable_name = kwargs.get('environment_variable_name', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
self.overwrite = kwargs.get('overwrite', None)
self.options = kwargs.get('options', None)
class InputData(msrest.serialization.Model):
"""InputData.
:ivar dataset_id:
:vartype dataset_id: str
:ivar mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
"ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
:vartype mode: str or ~flow.models.DataBindingMode
:ivar value:
:vartype value: str
"""
_attribute_map = {
'dataset_id': {'key': 'datasetId', 'type': 'str'},
'mode': {'key': 'mode', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword dataset_id:
:paramtype dataset_id: str
:keyword mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
"ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
:paramtype mode: str or ~flow.models.DataBindingMode
:keyword value:
:paramtype value: str
"""
super(InputData, self).__init__(**kwargs)
self.dataset_id = kwargs.get('dataset_id', None)
self.mode = kwargs.get('mode', None)
self.value = kwargs.get('value', None)
class InputDataBinding(msrest.serialization.Model):
"""InputDataBinding.
:ivar data_id:
:vartype data_id: str
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
"ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
:vartype mode: str or ~flow.models.DataBindingMode
:ivar description:
:vartype description: str
:ivar uri:
:vartype uri: ~flow.models.MfeInternalUriReference
:ivar value:
:vartype value: str
:ivar asset_uri:
:vartype asset_uri: str
:ivar job_input_type: Possible values include: "Dataset", "Uri", "Literal", "UriFile",
"UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel".
:vartype job_input_type: str or ~flow.models.JobInputType
"""
_attribute_map = {
'data_id': {'key': 'dataId', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'mode': {'key': 'mode', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'MfeInternalUriReference'},
'value': {'key': 'value', 'type': 'str'},
'asset_uri': {'key': 'assetUri', 'type': 'str'},
'job_input_type': {'key': 'jobInputType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_id:
:paramtype data_id: str
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
"ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
:paramtype mode: str or ~flow.models.DataBindingMode
:keyword description:
:paramtype description: str
:keyword uri:
:paramtype uri: ~flow.models.MfeInternalUriReference
:keyword value:
:paramtype value: str
:keyword asset_uri:
:paramtype asset_uri: str
:keyword job_input_type: Possible values include: "Dataset", "Uri", "Literal", "UriFile",
"UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel".
:paramtype job_input_type: str or ~flow.models.JobInputType
"""
super(InputDataBinding, self).__init__(**kwargs)
self.data_id = kwargs.get('data_id', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
self.mode = kwargs.get('mode', None)
self.description = kwargs.get('description', None)
self.uri = kwargs.get('uri', None)
self.value = kwargs.get('value', None)
self.asset_uri = kwargs.get('asset_uri', None)
self.job_input_type = kwargs.get('job_input_type', None)
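# Usage sketch (illustrative): binding a registered data asset to a job
# input, mounted read-only on the compute. The asset id format is an
# assumption for illustration only.
#
#   binding = InputDataBinding(
#       data_id='<registered-data-asset-id>',
#       mode='ReadOnlyMount',
#       job_input_type='MLTable',
#   )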
class InputDefinition(msrest.serialization.Model):
"""InputDefinition.
:ivar name:
:vartype name: str
:ivar type:
:vartype type: list[str or ~flow.models.ValueType]
:ivar default: Anything.
:vartype default: any
:ivar description:
:vartype description: str
:ivar enum:
:vartype enum: list[str]
:ivar enabled_by:
:vartype enabled_by: str
:ivar enabled_by_type:
:vartype enabled_by_type: list[str or ~flow.models.ValueType]
:ivar enabled_by_value:
:vartype enabled_by_value: list[any]
:ivar model_list:
:vartype model_list: list[str]
:ivar capabilities:
:vartype capabilities: ~flow.models.AzureOpenAIModelCapabilities
:ivar dynamic_list:
:vartype dynamic_list: ~flow.models.ToolInputDynamicList
:ivar allow_manual_entry:
:vartype allow_manual_entry: bool
:ivar is_multi_select:
:vartype is_multi_select: bool
:ivar generated_by:
:vartype generated_by: ~flow.models.ToolInputGeneratedBy
:ivar input_type: Possible values include: "default", "uionly_hidden".
:vartype input_type: str or ~flow.models.InputType
:ivar advanced:
:vartype advanced: bool
:ivar ui_hints: This is a dictionary.
:vartype ui_hints: dict[str, any]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': '[str]'},
'default': {'key': 'default', 'type': 'object'},
'description': {'key': 'description', 'type': 'str'},
'enum': {'key': 'enum', 'type': '[str]'},
'enabled_by': {'key': 'enabled_by', 'type': 'str'},
'enabled_by_type': {'key': 'enabled_by_type', 'type': '[str]'},
'enabled_by_value': {'key': 'enabled_by_value', 'type': '[object]'},
'model_list': {'key': 'model_list', 'type': '[str]'},
'capabilities': {'key': 'capabilities', 'type': 'AzureOpenAIModelCapabilities'},
'dynamic_list': {'key': 'dynamic_list', 'type': 'ToolInputDynamicList'},
'allow_manual_entry': {'key': 'allow_manual_entry', 'type': 'bool'},
'is_multi_select': {'key': 'is_multi_select', 'type': 'bool'},
'generated_by': {'key': 'generated_by', 'type': 'ToolInputGeneratedBy'},
'input_type': {'key': 'input_type', 'type': 'str'},
'advanced': {'key': 'advanced', 'type': 'bool'},
'ui_hints': {'key': 'ui_hints', 'type': '{object}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type:
:paramtype type: list[str or ~flow.models.ValueType]
:keyword default: Anything.
:paramtype default: any
:keyword description:
:paramtype description: str
:keyword enum:
:paramtype enum: list[str]
:keyword enabled_by:
:paramtype enabled_by: str
:keyword enabled_by_type:
:paramtype enabled_by_type: list[str or ~flow.models.ValueType]
:keyword enabled_by_value:
:paramtype enabled_by_value: list[any]
:keyword model_list:
:paramtype model_list: list[str]
:keyword capabilities:
:paramtype capabilities: ~flow.models.AzureOpenAIModelCapabilities
:keyword dynamic_list:
:paramtype dynamic_list: ~flow.models.ToolInputDynamicList
:keyword allow_manual_entry:
:paramtype allow_manual_entry: bool
:keyword is_multi_select:
:paramtype is_multi_select: bool
:keyword generated_by:
:paramtype generated_by: ~flow.models.ToolInputGeneratedBy
:keyword input_type: Possible values include: "default", "uionly_hidden".
:paramtype input_type: str or ~flow.models.InputType
:keyword advanced:
:paramtype advanced: bool
:keyword ui_hints: This is a dictionary.
:paramtype ui_hints: dict[str, any]
"""
super(InputDefinition, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = kwargs.get('type', None)
self.default = kwargs.get('default', None)
self.description = kwargs.get('description', None)
self.enum = kwargs.get('enum', None)
self.enabled_by = kwargs.get('enabled_by', None)
self.enabled_by_type = kwargs.get('enabled_by_type', None)
self.enabled_by_value = kwargs.get('enabled_by_value', None)
self.model_list = kwargs.get('model_list', None)
self.capabilities = kwargs.get('capabilities', None)
self.dynamic_list = kwargs.get('dynamic_list', None)
self.allow_manual_entry = kwargs.get('allow_manual_entry', None)
self.is_multi_select = kwargs.get('is_multi_select', None)
self.generated_by = kwargs.get('generated_by', None)
self.input_type = kwargs.get('input_type', None)
self.advanced = kwargs.get('advanced', None)
self.ui_hints = kwargs.get('ui_hints', None)
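# Usage sketch (illustrative): a typical tool input definition with an enum
# constraint. The 'type' list takes ValueType values serialized as strings;
# 'string' is assumed to be one of them here.
#
#   deployment_input = InputDefinition(
#       name='deployment_name',
#       type=['string'],
#       description='Name of the model deployment to call.',
#       enum=['deployment-a', 'deployment-b'],
#       allow_manual_entry=False,
#       is_multi_select=False,
#   )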
class InputOutputPortMetadata(msrest.serialization.Model):
"""InputOutputPortMetadata.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar graph_module_node_id:
:vartype graph_module_node_id: str
:ivar port_name:
:vartype port_name: str
:ivar schema:
:vartype schema: str
:ivar name:
:vartype name: str
:ivar id:
:vartype id: str
"""
_validation = {
'id': {'readonly': True},
}
_attribute_map = {
'graph_module_node_id': {'key': 'graphModuleNodeId', 'type': 'str'},
'port_name': {'key': 'portName', 'type': 'str'},
'schema': {'key': 'schema', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword graph_module_node_id:
:paramtype graph_module_node_id: str
:keyword port_name:
:paramtype port_name: str
:keyword schema:
:paramtype schema: str
:keyword name:
:paramtype name: str
"""
super(InputOutputPortMetadata, self).__init__(**kwargs)
self.graph_module_node_id = kwargs.get('graph_module_node_id', None)
self.port_name = kwargs.get('port_name', None)
self.schema = kwargs.get('schema', None)
self.name = kwargs.get('name', None)
self.id = None
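# Usage sketch (illustrative): 'id' is marked readonly in _validation, so it
# is ignored on construction and only populated when the object is
# deserialized from a service response.
#
#   meta = InputOutputPortMetadata(graph_module_node_id='module-node-1',
#                                  port_name='output')
#   assert meta.id is None  # never settable client-side
#   meta = InputOutputPortMetadata.deserialize({'id': 'port-guid',
#                                               'portName': 'output'})
#   assert meta.id == 'port-guid'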
class InputSetting(msrest.serialization.Model):
"""InputSetting.
:ivar name:
:vartype name: str
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar options: This is a dictionary.
:vartype options: dict[str, str]
:ivar additional_transformations:
:vartype additional_transformations: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'options': {'key': 'options', 'type': '{str}'},
'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword options: This is a dictionary.
:paramtype options: dict[str, str]
:keyword additional_transformations:
:paramtype additional_transformations: str
"""
super(InputSetting, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.data_store_mode = kwargs.get('data_store_mode', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
self.options = kwargs.get('options', None)
self.additional_transformations = kwargs.get('additional_transformations', None)
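
# A minimal, hypothetical InputSetting that mounts a named input on the
# compute target; the model only captures values for serialization, so
# constructing it has no side effects.
_example_input_setting = InputSetting(
    name="training_data",
    data_store_mode="Mount",  # one of the AEVADataStoreMode values listed above
    path_on_compute="/mnt/inputs/training_data",
)
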
class IntellectualPropertyPublisherInformation(msrest.serialization.Model):
"""IntellectualPropertyPublisherInformation.
:ivar intellectual_property_publisher:
:vartype intellectual_property_publisher: str
"""
_attribute_map = {
'intellectual_property_publisher': {'key': 'intellectualPropertyPublisher', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword intellectual_property_publisher:
:paramtype intellectual_property_publisher: str
"""
super(IntellectualPropertyPublisherInformation, self).__init__(**kwargs)
self.intellectual_property_publisher = kwargs.get('intellectual_property_publisher', None)
class InteractiveConfig(msrest.serialization.Model):
"""InteractiveConfig.
:ivar is_ssh_enabled:
:vartype is_ssh_enabled: bool
:ivar ssh_public_key:
:vartype ssh_public_key: str
:ivar is_i_python_enabled:
:vartype is_i_python_enabled: bool
:ivar is_tensor_board_enabled:
:vartype is_tensor_board_enabled: bool
:ivar interactive_port:
:vartype interactive_port: int
"""
_attribute_map = {
'is_ssh_enabled': {'key': 'isSSHEnabled', 'type': 'bool'},
'ssh_public_key': {'key': 'sshPublicKey', 'type': 'str'},
'is_i_python_enabled': {'key': 'isIPythonEnabled', 'type': 'bool'},
'is_tensor_board_enabled': {'key': 'isTensorBoardEnabled', 'type': 'bool'},
'interactive_port': {'key': 'interactivePort', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword is_ssh_enabled:
:paramtype is_ssh_enabled: bool
:keyword ssh_public_key:
:paramtype ssh_public_key: str
:keyword is_i_python_enabled:
:paramtype is_i_python_enabled: bool
:keyword is_tensor_board_enabled:
:paramtype is_tensor_board_enabled: bool
:keyword interactive_port:
:paramtype interactive_port: int
"""
super(InteractiveConfig, self).__init__(**kwargs)
self.is_ssh_enabled = kwargs.get('is_ssh_enabled', None)
self.ssh_public_key = kwargs.get('ssh_public_key', None)
self.is_i_python_enabled = kwargs.get('is_i_python_enabled', None)
self.is_tensor_board_enabled = kwargs.get('is_tensor_board_enabled', None)
self.interactive_port = kwargs.get('interactive_port', None)
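
# Hypothetical InteractiveConfig enabling SSH access for a job; the public
# key is a placeholder, not a real credential.
_example_interactive_config = InteractiveConfig(
    is_ssh_enabled=True,
    ssh_public_key="ssh-rsa AAAA...placeholder",
    is_tensor_board_enabled=False,
)
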
class InteractiveConfiguration(msrest.serialization.Model):
"""InteractiveConfiguration.
:ivar is_ssh_enabled:
:vartype is_ssh_enabled: bool
:ivar ssh_public_key:
:vartype ssh_public_key: str
:ivar is_i_python_enabled:
:vartype is_i_python_enabled: bool
:ivar is_tensor_board_enabled:
:vartype is_tensor_board_enabled: bool
:ivar interactive_port:
:vartype interactive_port: int
"""
_attribute_map = {
'is_ssh_enabled': {'key': 'isSSHEnabled', 'type': 'bool'},
'ssh_public_key': {'key': 'sshPublicKey', 'type': 'str'},
'is_i_python_enabled': {'key': 'isIPythonEnabled', 'type': 'bool'},
'is_tensor_board_enabled': {'key': 'isTensorBoardEnabled', 'type': 'bool'},
'interactive_port': {'key': 'interactivePort', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword is_ssh_enabled:
:paramtype is_ssh_enabled: bool
:keyword ssh_public_key:
:paramtype ssh_public_key: str
:keyword is_i_python_enabled:
:paramtype is_i_python_enabled: bool
:keyword is_tensor_board_enabled:
:paramtype is_tensor_board_enabled: bool
:keyword interactive_port:
:paramtype interactive_port: int
"""
super(InteractiveConfiguration, self).__init__(**kwargs)
self.is_ssh_enabled = kwargs.get('is_ssh_enabled', None)
self.ssh_public_key = kwargs.get('ssh_public_key', None)
self.is_i_python_enabled = kwargs.get('is_i_python_enabled', None)
self.is_tensor_board_enabled = kwargs.get('is_tensor_board_enabled', None)
self.interactive_port = kwargs.get('interactive_port', None)
class JobCost(msrest.serialization.Model):
"""JobCost.
:ivar charged_cpu_core_seconds:
:vartype charged_cpu_core_seconds: float
:ivar charged_cpu_memory_megabyte_seconds:
:vartype charged_cpu_memory_megabyte_seconds: float
:ivar charged_gpu_seconds:
:vartype charged_gpu_seconds: float
:ivar charged_node_utilization_seconds:
:vartype charged_node_utilization_seconds: float
"""
_attribute_map = {
'charged_cpu_core_seconds': {'key': 'chargedCpuCoreSeconds', 'type': 'float'},
'charged_cpu_memory_megabyte_seconds': {'key': 'chargedCpuMemoryMegabyteSeconds', 'type': 'float'},
'charged_gpu_seconds': {'key': 'chargedGpuSeconds', 'type': 'float'},
'charged_node_utilization_seconds': {'key': 'chargedNodeUtilizationSeconds', 'type': 'float'},
}
def __init__(
self,
**kwargs
):
"""
:keyword charged_cpu_core_seconds:
:paramtype charged_cpu_core_seconds: float
:keyword charged_cpu_memory_megabyte_seconds:
:paramtype charged_cpu_memory_megabyte_seconds: float
:keyword charged_gpu_seconds:
:paramtype charged_gpu_seconds: float
:keyword charged_node_utilization_seconds:
:paramtype charged_node_utilization_seconds: float
"""
super(JobCost, self).__init__(**kwargs)
self.charged_cpu_core_seconds = kwargs.get('charged_cpu_core_seconds', None)
self.charged_cpu_memory_megabyte_seconds = kwargs.get('charged_cpu_memory_megabyte_seconds', None)
self.charged_gpu_seconds = kwargs.get('charged_gpu_seconds', None)
self.charged_node_utilization_seconds = kwargs.get('charged_node_utilization_seconds', None)
class JobEndpoint(msrest.serialization.Model):
"""JobEndpoint.
:ivar type:
:vartype type: str
:ivar port:
:vartype port: int
:ivar endpoint:
:vartype endpoint: str
:ivar status:
:vartype status: str
:ivar error_message:
:vartype error_message: str
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar nodes:
:vartype nodes: ~flow.models.MfeInternalNodes
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'port': {'key': 'port', 'type': 'int'},
'endpoint': {'key': 'endpoint', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'error_message': {'key': 'errorMessage', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'nodes': {'key': 'nodes', 'type': 'MfeInternalNodes'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type:
:paramtype type: str
:keyword port:
:paramtype port: int
:keyword endpoint:
:paramtype endpoint: str
:keyword status:
:paramtype status: str
:keyword error_message:
:paramtype error_message: str
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword nodes:
:paramtype nodes: ~flow.models.MfeInternalNodes
"""
super(JobEndpoint, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.port = kwargs.get('port', None)
self.endpoint = kwargs.get('endpoint', None)
self.status = kwargs.get('status', None)
self.error_message = kwargs.get('error_message', None)
self.properties = kwargs.get('properties', None)
self.nodes = kwargs.get('nodes', None)
class JobInput(msrest.serialization.Model):
"""JobInput.
All required parameters must be populated in order to send to Azure.
:ivar job_input_type: Required. Possible values include: "Dataset", "Uri", "Literal",
"UriFile", "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel".
:vartype job_input_type: str or ~flow.models.JobInputType
:ivar description:
:vartype description: str
"""
_validation = {
'job_input_type': {'required': True},
}
_attribute_map = {
'job_input_type': {'key': 'jobInputType', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword job_input_type: Required. Possible values include: "Dataset", "Uri", "Literal",
"UriFile", "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel".
:paramtype job_input_type: str or ~flow.models.JobInputType
:keyword description:
:paramtype description: str
"""
super(JobInput, self).__init__(**kwargs)
self.job_input_type = kwargs['job_input_type']
self.description = kwargs.get('description', None)
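
# ``job_input_type`` is required: the constructor reads it via
# ``kwargs['job_input_type']``, so omitting it raises KeyError. A minimal,
# hypothetical example:
_example_job_input = JobInput(
    job_input_type="UriFile",
    description="A single input file referenced by URI.",
)
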
class JobOutput(msrest.serialization.Model):
"""JobOutput.
All required parameters must be populated in order to send to Azure.
:ivar job_output_type: Required. Possible values include: "Uri", "Dataset", "UriFile",
"UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel".
:vartype job_output_type: str or ~flow.models.JobOutputType
:ivar description:
:vartype description: str
:ivar auto_delete_setting:
:vartype auto_delete_setting: ~flow.models.AutoDeleteSetting
"""
_validation = {
'job_output_type': {'required': True},
}
_attribute_map = {
'job_output_type': {'key': 'jobOutputType', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'},
}
def __init__(
self,
**kwargs
):
"""
:keyword job_output_type: Required. Possible values include: "Uri", "Dataset", "UriFile",
"UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel".
:paramtype job_output_type: str or ~flow.models.JobOutputType
:keyword description:
:paramtype description: str
:keyword auto_delete_setting:
:paramtype auto_delete_setting: ~flow.models.AutoDeleteSetting
"""
super(JobOutput, self).__init__(**kwargs)
self.job_output_type = kwargs['job_output_type']
self.description = kwargs.get('description', None)
self.auto_delete_setting = kwargs.get('auto_delete_setting', None)
class JobOutputArtifacts(msrest.serialization.Model):
"""JobOutputArtifacts.
:ivar datastore_id:
:vartype datastore_id: str
:ivar path:
:vartype path: str
"""
_attribute_map = {
'datastore_id': {'key': 'datastoreId', 'type': 'str'},
'path': {'key': 'path', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword datastore_id:
:paramtype datastore_id: str
:keyword path:
:paramtype path: str
"""
super(JobOutputArtifacts, self).__init__(**kwargs)
self.datastore_id = kwargs.get('datastore_id', None)
self.path = kwargs.get('path', None)
class JobScheduleDto(msrest.serialization.Model):
"""JobScheduleDto.
:ivar job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data",
"AutoML", "Spark", "Base".
:vartype job_type: str or ~flow.models.JobType
:ivar system_data:
:vartype system_data: ~flow.models.SystemData
:ivar name:
:vartype name: str
:ivar job_definition_id:
:vartype job_definition_id: str
:ivar display_name:
:vartype display_name: str
:ivar trigger_type: Possible values include: "Recurrence", "Cron".
:vartype trigger_type: str or ~flow.models.TriggerType
:ivar recurrence:
:vartype recurrence: ~flow.models.Recurrence
:ivar cron:
:vartype cron: ~flow.models.Cron
:ivar status: Possible values include: "Enabled", "Disabled".
:vartype status: str or ~flow.models.ScheduleStatus
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
"""
_attribute_map = {
'job_type': {'key': 'jobType', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'name': {'key': 'name', 'type': 'str'},
'job_definition_id': {'key': 'jobDefinitionId', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'trigger_type': {'key': 'triggerType', 'type': 'str'},
'recurrence': {'key': 'recurrence', 'type': 'Recurrence'},
'cron': {'key': 'cron', 'type': 'Cron'},
'status': {'key': 'status', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data",
"AutoML", "Spark", "Base".
:paramtype job_type: str or ~flow.models.JobType
:keyword system_data:
:paramtype system_data: ~flow.models.SystemData
:keyword name:
:paramtype name: str
:keyword job_definition_id:
:paramtype job_definition_id: str
:keyword display_name:
:paramtype display_name: str
:keyword trigger_type: Possible values include: "Recurrence", "Cron".
:paramtype trigger_type: str or ~flow.models.TriggerType
:keyword recurrence:
:paramtype recurrence: ~flow.models.Recurrence
:keyword cron:
:paramtype cron: ~flow.models.Cron
:keyword status: Possible values include: "Enabled", "Disabled".
:paramtype status: str or ~flow.models.ScheduleStatus
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
"""
super(JobScheduleDto, self).__init__(**kwargs)
self.job_type = kwargs.get('job_type', None)
self.system_data = kwargs.get('system_data', None)
self.name = kwargs.get('name', None)
self.job_definition_id = kwargs.get('job_definition_id', None)
self.display_name = kwargs.get('display_name', None)
self.trigger_type = kwargs.get('trigger_type', None)
self.recurrence = kwargs.get('recurrence', None)
self.cron = kwargs.get('cron', None)
self.status = kwargs.get('status', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
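
# Hypothetical cron-triggered schedule. The ``expression`` keyword on the
# Cron model is an assumption based on its usual shape in this service;
# consult the Cron class earlier in this module for the exact fields.
_example_job_schedule = JobScheduleDto(
    job_type="Pipeline",
    name="nightly-training",
    trigger_type="Cron",
    cron=Cron(expression="0 2 * * *"),  # assumed field name; fires 02:00 daily
    status="Enabled",
)
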
class K8SConfiguration(msrest.serialization.Model):
"""K8SConfiguration.
:ivar max_retry_count:
:vartype max_retry_count: int
:ivar resource_configuration:
:vartype resource_configuration: ~flow.models.ResourceConfig
:ivar priority_configuration:
:vartype priority_configuration: ~flow.models.PriorityConfig
:ivar interactive_configuration:
:vartype interactive_configuration: ~flow.models.InteractiveConfig
"""
_attribute_map = {
'max_retry_count': {'key': 'maxRetryCount', 'type': 'int'},
'resource_configuration': {'key': 'resourceConfiguration', 'type': 'ResourceConfig'},
'priority_configuration': {'key': 'priorityConfiguration', 'type': 'PriorityConfig'},
'interactive_configuration': {'key': 'interactiveConfiguration', 'type': 'InteractiveConfig'},
}
def __init__(
self,
**kwargs
):
"""
:keyword max_retry_count:
:paramtype max_retry_count: int
:keyword resource_configuration:
:paramtype resource_configuration: ~flow.models.ResourceConfig
:keyword priority_configuration:
:paramtype priority_configuration: ~flow.models.PriorityConfig
:keyword interactive_configuration:
:paramtype interactive_configuration: ~flow.models.InteractiveConfig
"""
super(K8SConfiguration, self).__init__(**kwargs)
self.max_retry_count = kwargs.get('max_retry_count', None)
self.resource_configuration = kwargs.get('resource_configuration', None)
self.priority_configuration = kwargs.get('priority_configuration', None)
self.interactive_configuration = kwargs.get('interactive_configuration', None)
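
# Hypothetical Kubernetes job configuration. Only classes defined earlier in
# this module are instantiated here; ResourceConfig and PriorityConfig would
# slot into their keywords the same way.
_example_k8s_configuration = K8SConfiguration(
    max_retry_count=3,
    interactive_configuration=InteractiveConfig(is_ssh_enabled=False),
)
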
class KeyValuePairComponentNameMetaInfoErrorResponse(msrest.serialization.Model):
"""KeyValuePairComponentNameMetaInfoErrorResponse.
:ivar key:
:vartype key: ~flow.models.ComponentNameMetaInfo
:ivar value: The error response.
:vartype value: ~flow.models.ErrorResponse
"""
_attribute_map = {
'key': {'key': 'key', 'type': 'ComponentNameMetaInfo'},
'value': {'key': 'value', 'type': 'ErrorResponse'},
}
def __init__(
self,
**kwargs
):
"""
:keyword key:
:paramtype key: ~flow.models.ComponentNameMetaInfo
:keyword value: The error response.
:paramtype value: ~flow.models.ErrorResponse
"""
super(KeyValuePairComponentNameMetaInfoErrorResponse, self).__init__(**kwargs)
self.key = kwargs.get('key', None)
self.value = kwargs.get('value', None)
class KeyValuePairComponentNameMetaInfoModuleDto(msrest.serialization.Model):
"""KeyValuePairComponentNameMetaInfoModuleDto.
:ivar key:
:vartype key: ~flow.models.ComponentNameMetaInfo
:ivar value:
:vartype value: ~flow.models.ModuleDto
"""
_attribute_map = {
'key': {'key': 'key', 'type': 'ComponentNameMetaInfo'},
'value': {'key': 'value', 'type': 'ModuleDto'},
}
def __init__(
self,
**kwargs
):
"""
:keyword key:
:paramtype key: ~flow.models.ComponentNameMetaInfo
:keyword value:
:paramtype value: ~flow.models.ModuleDto
"""
super(KeyValuePairComponentNameMetaInfoModuleDto, self).__init__(**kwargs)
self.key = kwargs.get('key', None)
self.value = kwargs.get('value', None)
class KeyValuePairStringObject(msrest.serialization.Model):
"""KeyValuePairStringObject.
:ivar key:
:vartype key: str
:ivar value: Anything.
:vartype value: any
"""
_attribute_map = {
'key': {'key': 'key', 'type': 'str'},
'value': {'key': 'value', 'type': 'object'},
}
def __init__(
self,
**kwargs
):
"""
:keyword key:
:paramtype key: str
:keyword value: Anything.
:paramtype value: any
"""
super(KeyValuePairStringObject, self).__init__(**kwargs)
self.key = kwargs.get('key', None)
self.value = kwargs.get('value', None)
class KubernetesConfiguration(msrest.serialization.Model):
"""KubernetesConfiguration.
:ivar instance_type:
:vartype instance_type: str
"""
_attribute_map = {
'instance_type': {'key': 'instanceType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword instance_type:
:paramtype instance_type: str
"""
super(KubernetesConfiguration, self).__init__(**kwargs)
self.instance_type = kwargs.get('instance_type', None)
class Kwarg(msrest.serialization.Model):
"""Kwarg.
:ivar key:
:vartype key: str
:ivar value:
:vartype value: str
"""
_attribute_map = {
'key': {'key': 'key', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword key:
:paramtype key: str
:keyword value:
:paramtype value: str
"""
super(Kwarg, self).__init__(**kwargs)
self.key = kwargs.get('key', None)
self.value = kwargs.get('value', None)
class LegacyDataPath(msrest.serialization.Model):
"""LegacyDataPath.
:ivar data_store_name:
:vartype data_store_name: str
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
:ivar relative_path:
:vartype relative_path: str
"""
_attribute_map = {
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_store_name:
:paramtype data_store_name: str
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
:keyword relative_path:
:paramtype relative_path: str
"""
super(LegacyDataPath, self).__init__(**kwargs)
self.data_store_name = kwargs.get('data_store_name', None)
self.data_store_mode = kwargs.get('data_store_mode', None)
self.relative_path = kwargs.get('relative_path', None)
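
# Hypothetical data path pointing at a relative location inside a workspace
# datastore, delivered by mounting.
_example_legacy_data_path = LegacyDataPath(
    data_store_name="workspaceblobstore",
    data_store_mode="Mount",
    relative_path="datasets/train/train.csv",
)
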
class LimitSettings(msrest.serialization.Model):
"""LimitSettings.
:ivar max_trials:
:vartype max_trials: int
:ivar timeout:
:vartype timeout: str
:ivar trial_timeout:
:vartype trial_timeout: str
:ivar max_concurrent_trials:
:vartype max_concurrent_trials: int
:ivar max_cores_per_trial:
:vartype max_cores_per_trial: int
:ivar exit_score:
:vartype exit_score: float
:ivar enable_early_termination:
:vartype enable_early_termination: bool
:ivar max_nodes:
:vartype max_nodes: int
"""
_attribute_map = {
'max_trials': {'key': 'maxTrials', 'type': 'int'},
'timeout': {'key': 'timeout', 'type': 'str'},
'trial_timeout': {'key': 'trialTimeout', 'type': 'str'},
'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'},
'max_cores_per_trial': {'key': 'maxCoresPerTrial', 'type': 'int'},
'exit_score': {'key': 'exitScore', 'type': 'float'},
'enable_early_termination': {'key': 'enableEarlyTermination', 'type': 'bool'},
'max_nodes': {'key': 'maxNodes', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword max_trials:
:paramtype max_trials: int
:keyword timeout:
:paramtype timeout: str
:keyword trial_timeout:
:paramtype trial_timeout: str
:keyword max_concurrent_trials:
:paramtype max_concurrent_trials: int
:keyword max_cores_per_trial:
:paramtype max_cores_per_trial: int
:keyword exit_score:
:paramtype exit_score: float
:keyword enable_early_termination:
:paramtype enable_early_termination: bool
:keyword max_nodes:
:paramtype max_nodes: int
"""
super(LimitSettings, self).__init__(**kwargs)
self.max_trials = kwargs.get('max_trials', None)
self.timeout = kwargs.get('timeout', None)
self.trial_timeout = kwargs.get('trial_timeout', None)
self.max_concurrent_trials = kwargs.get('max_concurrent_trials', None)
self.max_cores_per_trial = kwargs.get('max_cores_per_trial', None)
self.exit_score = kwargs.get('exit_score', None)
self.enable_early_termination = kwargs.get('enable_early_termination', None)
self.max_nodes = kwargs.get('max_nodes', None)
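
# Hypothetical limit settings. ``timeout`` and ``trial_timeout`` are plain
# strings; ISO 8601 durations such as "PT1H" are one plausible wire format,
# but the model itself does not validate them.
_example_limit_settings = LimitSettings(
    max_trials=20,
    max_concurrent_trials=4,
    timeout="PT2H",  # assumed ISO 8601 duration format
    trial_timeout="PT15M",
    enable_early_termination=True,
)
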
class LinkedADBWorkspaceMetadata(msrest.serialization.Model):
"""LinkedADBWorkspaceMetadata.
:ivar workspace_id:
:vartype workspace_id: str
:ivar region:
:vartype region: str
"""
_attribute_map = {
'workspace_id': {'key': 'workspaceId', 'type': 'str'},
'region': {'key': 'region', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword workspace_id:
:paramtype workspace_id: str
:keyword region:
:paramtype region: str
"""
super(LinkedADBWorkspaceMetadata, self).__init__(**kwargs)
self.workspace_id = kwargs.get('workspace_id', None)
self.region = kwargs.get('region', None)
class LinkedPipelineInfo(msrest.serialization.Model):
"""LinkedPipelineInfo.
:ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline",
"BatchInferencePipeline", "Unknown".
:vartype pipeline_type: str or ~flow.models.PipelineType
:ivar module_node_id:
:vartype module_node_id: str
:ivar port_name:
:vartype port_name: str
:ivar linked_pipeline_draft_id:
:vartype linked_pipeline_draft_id: str
:ivar linked_pipeline_run_id:
:vartype linked_pipeline_run_id: str
:ivar is_direct_link:
:vartype is_direct_link: bool
"""
_attribute_map = {
'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
'module_node_id': {'key': 'moduleNodeId', 'type': 'str'},
'port_name': {'key': 'portName', 'type': 'str'},
'linked_pipeline_draft_id': {'key': 'linkedPipelineDraftId', 'type': 'str'},
'linked_pipeline_run_id': {'key': 'linkedPipelineRunId', 'type': 'str'},
'is_direct_link': {'key': 'isDirectLink', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword pipeline_type: Possible values include: "TrainingPipeline",
"RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
:paramtype pipeline_type: str or ~flow.models.PipelineType
:keyword module_node_id:
:paramtype module_node_id: str
:keyword port_name:
:paramtype port_name: str
:keyword linked_pipeline_draft_id:
:paramtype linked_pipeline_draft_id: str
:keyword linked_pipeline_run_id:
:paramtype linked_pipeline_run_id: str
:keyword is_direct_link:
:paramtype is_direct_link: bool
"""
super(LinkedPipelineInfo, self).__init__(**kwargs)
self.pipeline_type = kwargs.get('pipeline_type', None)
self.module_node_id = kwargs.get('module_node_id', None)
self.port_name = kwargs.get('port_name', None)
self.linked_pipeline_draft_id = kwargs.get('linked_pipeline_draft_id', None)
self.linked_pipeline_run_id = kwargs.get('linked_pipeline_run_id', None)
self.is_direct_link = kwargs.get('is_direct_link', None)
class LoadFlowAsComponentRequest(msrest.serialization.Model):
"""LoadFlowAsComponentRequest.
:ivar component_name:
:vartype component_name: str
:ivar component_version:
:vartype component_version: str
:ivar display_name:
:vartype display_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar is_deterministic:
:vartype is_deterministic: bool
:ivar flow_definition_file_path:
:vartype flow_definition_file_path: str
:ivar flow_definition_resource_id:
:vartype flow_definition_resource_id: str
:ivar flow_definition_data_store_name:
:vartype flow_definition_data_store_name: str
:ivar flow_definition_blob_path:
:vartype flow_definition_blob_path: str
:ivar flow_definition_data_uri:
:vartype flow_definition_data_uri: str
:ivar node_variant:
:vartype node_variant: str
:ivar inputs_mapping: This is a dictionary.
:vartype inputs_mapping: dict[str, str]
:ivar connections: This is a dictionary.
:vartype connections: dict[str, dict[str, str]]
:ivar environment_variables: This is a dictionary.
:vartype environment_variables: dict[str, str]
:ivar runtime_name:
:vartype runtime_name: str
:ivar session_id:
:vartype session_id: str
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
"""
_attribute_map = {
'component_name': {'key': 'componentName', 'type': 'str'},
'component_version': {'key': 'componentVersion', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'is_deterministic': {'key': 'isDeterministic', 'type': 'bool'},
'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
'flow_definition_resource_id': {'key': 'flowDefinitionResourceId', 'type': 'str'},
'flow_definition_data_store_name': {'key': 'flowDefinitionDataStoreName', 'type': 'str'},
'flow_definition_blob_path': {'key': 'flowDefinitionBlobPath', 'type': 'str'},
'flow_definition_data_uri': {'key': 'flowDefinitionDataUri', 'type': 'str'},
'node_variant': {'key': 'nodeVariant', 'type': 'str'},
'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'},
'connections': {'key': 'connections', 'type': '{{str}}'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'session_id': {'key': 'sessionId', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
}
def __init__(
self,
**kwargs
):
"""
:keyword component_name:
:paramtype component_name: str
:keyword component_version:
:paramtype component_version: str
:keyword display_name:
:paramtype display_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword is_deterministic:
:paramtype is_deterministic: bool
:keyword flow_definition_file_path:
:paramtype flow_definition_file_path: str
:keyword flow_definition_resource_id:
:paramtype flow_definition_resource_id: str
:keyword flow_definition_data_store_name:
:paramtype flow_definition_data_store_name: str
:keyword flow_definition_blob_path:
:paramtype flow_definition_blob_path: str
:keyword flow_definition_data_uri:
:paramtype flow_definition_data_uri: str
:keyword node_variant:
:paramtype node_variant: str
:keyword inputs_mapping: This is a dictionary.
:paramtype inputs_mapping: dict[str, str]
:keyword connections: This is a dictionary.
:paramtype connections: dict[str, dict[str, str]]
:keyword environment_variables: This is a dictionary.
:paramtype environment_variables: dict[str, str]
:keyword runtime_name:
:paramtype runtime_name: str
:keyword session_id:
:paramtype session_id: str
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
"""
super(LoadFlowAsComponentRequest, self).__init__(**kwargs)
self.component_name = kwargs.get('component_name', None)
self.component_version = kwargs.get('component_version', None)
self.display_name = kwargs.get('display_name', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
self.is_deterministic = kwargs.get('is_deterministic', None)
self.flow_definition_file_path = kwargs.get('flow_definition_file_path', None)
self.flow_definition_resource_id = kwargs.get('flow_definition_resource_id', None)
self.flow_definition_data_store_name = kwargs.get('flow_definition_data_store_name', None)
self.flow_definition_blob_path = kwargs.get('flow_definition_blob_path', None)
self.flow_definition_data_uri = kwargs.get('flow_definition_data_uri', None)
self.node_variant = kwargs.get('node_variant', None)
self.inputs_mapping = kwargs.get('inputs_mapping', None)
self.connections = kwargs.get('connections', None)
self.environment_variables = kwargs.get('environment_variables', None)
self.runtime_name = kwargs.get('runtime_name', None)
self.session_id = kwargs.get('session_id', None)
self.vm_size = kwargs.get('vm_size', None)
self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
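
# A hypothetical request registering a flow as a named, non-deterministic
# component. Only one of the flow_definition_* locators would normally be
# supplied; the file-path variant is shown here.
_example_load_flow_request = LoadFlowAsComponentRequest(
    component_name="my_flow_component",
    component_version="1",
    is_deterministic=False,
    flow_definition_file_path="flows/my_flow/flow.dag.yaml",
    runtime_name="automatic",
)
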
class LogRunTerminatedEventDto(msrest.serialization.Model):
"""LogRunTerminatedEventDto.
:ivar next_action_interval_in_seconds:
:vartype next_action_interval_in_seconds: int
:ivar action_type: Possible values include: "SendValidationRequest", "GetValidationStatus",
"SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent", "SubmitFlowRun".
:vartype action_type: str or ~flow.models.ActionType
:ivar last_checked_time:
:vartype last_checked_time: ~datetime.datetime
"""
_attribute_map = {
'next_action_interval_in_seconds': {'key': 'nextActionIntervalInSeconds', 'type': 'int'},
'action_type': {'key': 'actionType', 'type': 'str'},
'last_checked_time': {'key': 'lastCheckedTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword next_action_interval_in_seconds:
:paramtype next_action_interval_in_seconds: int
:keyword action_type: Possible values include: "SendValidationRequest", "GetValidationStatus",
"SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent", "SubmitFlowRun".
:paramtype action_type: str or ~flow.models.ActionType
:keyword last_checked_time:
:paramtype last_checked_time: ~datetime.datetime
"""
super(LogRunTerminatedEventDto, self).__init__(**kwargs)
self.next_action_interval_in_seconds = kwargs.get('next_action_interval_in_seconds', None)
self.action_type = kwargs.get('action_type', None)
self.last_checked_time = kwargs.get('last_checked_time', None)
class LongRunningOperationUriResponse(msrest.serialization.Model):
"""LongRunningOperationUriResponse.
:ivar location:
:vartype location: str
:ivar operation_result:
:vartype operation_result: str
"""
_attribute_map = {
'location': {'key': 'location', 'type': 'str'},
'operation_result': {'key': 'operationResult', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword location:
:paramtype location: str
:keyword operation_result:
:paramtype operation_result: str
"""
super(LongRunningOperationUriResponse, self).__init__(**kwargs)
self.location = kwargs.get('location', None)
self.operation_result = kwargs.get('operation_result', None)
class LongRunningUpdateRegistryComponentRequest(msrest.serialization.Model):
"""LongRunningUpdateRegistryComponentRequest.
:ivar display_name:
:vartype display_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar registry_name:
:vartype registry_name: str
:ivar component_name:
:vartype component_name: str
:ivar component_version:
:vartype component_version: str
:ivar update_type: Possible values include: "EnableModule", "DisableModule",
"UpdateDisplayName", "UpdateDescription", "UpdateTags".
:vartype update_type: str or ~flow.models.LongRunningUpdateType
"""
_attribute_map = {
'display_name': {'key': 'displayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'registry_name': {'key': 'registryName', 'type': 'str'},
'component_name': {'key': 'componentName', 'type': 'str'},
'component_version': {'key': 'componentVersion', 'type': 'str'},
'update_type': {'key': 'updateType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword display_name:
:paramtype display_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword registry_name:
:paramtype registry_name: str
:keyword component_name:
:paramtype component_name: str
:keyword component_version:
:paramtype component_version: str
:keyword update_type: Possible values include: "EnableModule", "DisableModule",
"UpdateDisplayName", "UpdateDescription", "UpdateTags".
:paramtype update_type: str or ~flow.models.LongRunningUpdateType
"""
super(LongRunningUpdateRegistryComponentRequest, self).__init__(**kwargs)
self.display_name = kwargs.get('display_name', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.registry_name = kwargs.get('registry_name', None)
self.component_name = kwargs.get('component_name', None)
self.component_version = kwargs.get('component_version', None)
self.update_type = kwargs.get('update_type', None)
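
# Hypothetical request updating only the description of a registry component;
# judging by the enum values above, ``update_type`` selects which kind of
# change the service applies.
_example_update_request = LongRunningUpdateRegistryComponentRequest(
    registry_name="my-registry",
    component_name="my_component",
    component_version="1",
    update_type="UpdateDescription",
    description="Refreshed description text.",
)
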
class ManagedServiceIdentity(msrest.serialization.Model):
"""ManagedServiceIdentity.
All required parameters must be populated in order to send to Azure.
:ivar type: Required. Possible values include: "SystemAssigned", "UserAssigned",
"SystemAssignedUserAssigned", "None".
:vartype type: str or ~flow.models.ManagedServiceIdentityType
:ivar principal_id:
:vartype principal_id: str
:ivar tenant_id:
:vartype tenant_id: str
:ivar user_assigned_identities: Dictionary of :code:`<UserAssignedIdentity>`.
:vartype user_assigned_identities: dict[str, ~flow.models.UserAssignedIdentity]
"""
_validation = {
'type': {'required': True},
}
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'principal_id': {'key': 'principalId', 'type': 'str'},
'tenant_id': {'key': 'tenantId', 'type': 'str'},
'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentity}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: Required. Possible values include: "SystemAssigned", "UserAssigned",
"SystemAssignedUserAssigned", "None".
:paramtype type: str or ~flow.models.ManagedServiceIdentityType
:keyword principal_id:
:paramtype principal_id: str
:keyword tenant_id:
:paramtype tenant_id: str
:keyword user_assigned_identities: Dictionary of :code:`<UserAssignedIdentity>`.
:paramtype user_assigned_identities: dict[str, ~flow.models.UserAssignedIdentity]
"""
super(ManagedServiceIdentity, self).__init__(**kwargs)
self.type = kwargs['type']
self.principal_id = kwargs.get('principal_id', None)
self.tenant_id = kwargs.get('tenant_id', None)
self.user_assigned_identities = kwargs.get('user_assigned_identities', None)
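
# ``type`` is the only required field (see ``_validation`` above); for a
# system-assigned identity the remaining fields are typically populated by
# the service.
_example_identity = ManagedServiceIdentity(type="SystemAssigned")
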
class MavenLibraryDto(msrest.serialization.Model):
"""MavenLibraryDto.
:ivar coordinates:
:vartype coordinates: str
:ivar repo:
:vartype repo: str
:ivar exclusions:
:vartype exclusions: list[str]
"""
_attribute_map = {
'coordinates': {'key': 'coordinates', 'type': 'str'},
'repo': {'key': 'repo', 'type': 'str'},
'exclusions': {'key': 'exclusions', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword coordinates:
:paramtype coordinates: str
:keyword repo:
:paramtype repo: str
:keyword exclusions:
:paramtype exclusions: list[str]
"""
super(MavenLibraryDto, self).__init__(**kwargs)
self.coordinates = kwargs.get('coordinates', None)
self.repo = kwargs.get('repo', None)
self.exclusions = kwargs.get('exclusions', None)
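
# Hypothetical Maven library reference; ``coordinates`` follows the standard
# "groupId:artifactId:version" form.
_example_maven_library = MavenLibraryDto(
    coordinates="org.apache.spark:spark-avro_2.12:3.3.0",
    exclusions=["org.slf4j:slf4j-api"],
)
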
class MetricProperties(msrest.serialization.Model):
"""MetricProperties.
:ivar ux_metric_type:
:vartype ux_metric_type: str
"""
_attribute_map = {
'ux_metric_type': {'key': 'uxMetricType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword ux_metric_type:
:paramtype ux_metric_type: str
"""
super(MetricProperties, self).__init__(**kwargs)
self.ux_metric_type = kwargs.get('ux_metric_type', None)
class MetricSchemaDto(msrest.serialization.Model):
"""MetricSchemaDto.
:ivar num_properties:
:vartype num_properties: int
:ivar properties:
:vartype properties: list[~flow.models.MetricSchemaPropertyDto]
"""
_attribute_map = {
'num_properties': {'key': 'numProperties', 'type': 'int'},
'properties': {'key': 'properties', 'type': '[MetricSchemaPropertyDto]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword num_properties:
:paramtype num_properties: int
:keyword properties:
:paramtype properties: list[~flow.models.MetricSchemaPropertyDto]
"""
super(MetricSchemaDto, self).__init__(**kwargs)
self.num_properties = kwargs.get('num_properties', None)
self.properties = kwargs.get('properties', None)
class MetricSchemaPropertyDto(msrest.serialization.Model):
"""MetricSchemaPropertyDto.
:ivar property_id:
:vartype property_id: str
:ivar name:
:vartype name: str
:ivar type:
:vartype type: str
"""
_attribute_map = {
'property_id': {'key': 'propertyId', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword property_id:
:paramtype property_id: str
:keyword name:
:paramtype name: str
:keyword type:
:paramtype type: str
"""
super(MetricSchemaPropertyDto, self).__init__(**kwargs)
self.property_id = kwargs.get('property_id', None)
self.name = kwargs.get('name', None)
self.type = kwargs.get('type', None)
class MetricV2Dto(msrest.serialization.Model):
"""MetricV2Dto.
:ivar data_container_id:
:vartype data_container_id: str
:ivar name:
:vartype name: str
:ivar columns: This is a dictionary.
:vartype columns: dict[str, str or ~flow.models.MetricValueType]
:ivar properties:
:vartype properties: ~flow.models.MetricProperties
:ivar namespace:
:vartype namespace: str
:ivar standard_schema_id:
:vartype standard_schema_id: str
:ivar value:
:vartype value: list[~flow.models.MetricV2Value]
:ivar continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:vartype continuation_token: str
:ivar next_link: The link to the next page constructed using the continuationToken. If null,
there are no additional pages.
:vartype next_link: str
"""
_attribute_map = {
'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'columns': {'key': 'columns', 'type': '{str}'},
'properties': {'key': 'properties', 'type': 'MetricProperties'},
'namespace': {'key': 'namespace', 'type': 'str'},
'standard_schema_id': {'key': 'standardSchemaId', 'type': 'str'},
'value': {'key': 'value', 'type': '[MetricV2Value]'},
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_container_id:
:paramtype data_container_id: str
:keyword name:
:paramtype name: str
:keyword columns: This is a dictionary.
:paramtype columns: dict[str, str or ~flow.models.MetricValueType]
:keyword properties:
:paramtype properties: ~flow.models.MetricProperties
:keyword namespace:
:paramtype namespace: str
:keyword standard_schema_id:
:paramtype standard_schema_id: str
:keyword value:
:paramtype value: list[~flow.models.MetricV2Value]
:keyword continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:paramtype continuation_token: str
:keyword next_link: The link to the next page constructed using the continuationToken. If
null, there are no additional pages.
:paramtype next_link: str
"""
super(MetricV2Dto, self).__init__(**kwargs)
self.data_container_id = kwargs.get('data_container_id', None)
self.name = kwargs.get('name', None)
self.columns = kwargs.get('columns', None)
self.properties = kwargs.get('properties', None)
self.namespace = kwargs.get('namespace', None)
self.standard_schema_id = kwargs.get('standard_schema_id', None)
self.value = kwargs.get('value', None)
self.continuation_token = kwargs.get('continuation_token', None)
self.next_link = kwargs.get('next_link', None)
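
# Pagination sketch: ``continuation_token`` and ``next_link`` are documented
# above as None on the final page. ``fetch_page`` is a hypothetical callable
# standing in for whichever client operation returns MetricV2Dto pages.
def _drain_metric_pages(first_page, fetch_page):
    values = list(first_page.value or [])
    page = first_page
    while page.continuation_token is not None:
        page = fetch_page(page.next_link)
        values.extend(page.value or [])
    return values
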
class MetricV2Value(msrest.serialization.Model):
"""MetricV2Value.
:ivar metric_id:
:vartype metric_id: str
:ivar created_utc:
:vartype created_utc: ~datetime.datetime
:ivar step:
:vartype step: long
:ivar data: Dictionary of :code:`<any>`.
:vartype data: dict[str, any]
:ivar sas_uri:
:vartype sas_uri: str
"""
_attribute_map = {
'metric_id': {'key': 'metricId', 'type': 'str'},
'created_utc': {'key': 'createdUtc', 'type': 'iso-8601'},
'step': {'key': 'step', 'type': 'long'},
'data': {'key': 'data', 'type': '{object}'},
'sas_uri': {'key': 'sasUri', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword metric_id:
:paramtype metric_id: str
:keyword created_utc:
:paramtype created_utc: ~datetime.datetime
:keyword step:
:paramtype step: long
:keyword data: Dictionary of :code:`<any>`.
:paramtype data: dict[str, any]
:keyword sas_uri:
:paramtype sas_uri: str
"""
super(MetricV2Value, self).__init__(**kwargs)
self.metric_id = kwargs.get('metric_id', None)
self.created_utc = kwargs.get('created_utc', None)
self.step = kwargs.get('step', None)
self.data = kwargs.get('data', None)
self.sas_uri = kwargs.get('sas_uri', None)
class MfeInternalAutologgerSettings(msrest.serialization.Model):
"""MfeInternalAutologgerSettings.
:ivar mlflow_autologger: Possible values include: "Enabled", "Disabled".
:vartype mlflow_autologger: str or ~flow.models.MfeInternalMLFlowAutologgerState
"""
_attribute_map = {
'mlflow_autologger': {'key': 'mlflowAutologger', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword mlflow_autologger: Possible values include: "Enabled", "Disabled".
:paramtype mlflow_autologger: str or ~flow.models.MfeInternalMLFlowAutologgerState
"""
super(MfeInternalAutologgerSettings, self).__init__(**kwargs)
self.mlflow_autologger = kwargs.get('mlflow_autologger', None)
class MfeInternalIdentityConfiguration(msrest.serialization.Model):
"""MfeInternalIdentityConfiguration.
:ivar identity_type: Possible values include: "Managed", "AMLToken", "UserIdentity".
:vartype identity_type: str or ~flow.models.MfeInternalIdentityType
"""
_attribute_map = {
'identity_type': {'key': 'identityType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword identity_type: Possible values include: "Managed", "AMLToken", "UserIdentity".
:paramtype identity_type: str or ~flow.models.MfeInternalIdentityType
"""
super(MfeInternalIdentityConfiguration, self).__init__(**kwargs)
self.identity_type = kwargs.get('identity_type', None)
class MfeInternalNodes(msrest.serialization.Model):
"""MfeInternalNodes.
:ivar nodes_value_type: The only acceptable values to pass in are None and "All". The default
value is None.
:vartype nodes_value_type: str
"""
_attribute_map = {
'nodes_value_type': {'key': 'nodesValueType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword nodes_value_type: The only acceptable values to pass in are None and "All". The
default value is None.
:paramtype nodes_value_type: str
"""
super(MfeInternalNodes, self).__init__(**kwargs)
self.nodes_value_type = kwargs.get('nodes_value_type', None)
class MfeInternalOutputData(msrest.serialization.Model):
"""MfeInternalOutputData.
:ivar dataset_name:
:vartype dataset_name: str
:ivar datastore:
:vartype datastore: str
:ivar datapath:
:vartype datapath: str
:ivar mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
"ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
:vartype mode: str or ~flow.models.DataBindingMode
"""
_attribute_map = {
'dataset_name': {'key': 'datasetName', 'type': 'str'},
'datastore': {'key': 'datastore', 'type': 'str'},
'datapath': {'key': 'datapath', 'type': 'str'},
'mode': {'key': 'mode', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword dataset_name:
:paramtype dataset_name: str
:keyword datastore:
:paramtype datastore: str
:keyword datapath:
:paramtype datapath: str
:keyword mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
"ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
:paramtype mode: str or ~flow.models.DataBindingMode
"""
super(MfeInternalOutputData, self).__init__(**kwargs)
self.dataset_name = kwargs.get('dataset_name', None)
self.datastore = kwargs.get('datastore', None)
self.datapath = kwargs.get('datapath', None)
self.mode = kwargs.get('mode', None)
class MfeInternalSecretConfiguration(msrest.serialization.Model):
"""MfeInternalSecretConfiguration.
:ivar workspace_secret_name:
:vartype workspace_secret_name: str
:ivar uri:
:vartype uri: str
"""
_attribute_map = {
'workspace_secret_name': {'key': 'workspaceSecretName', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword workspace_secret_name:
:paramtype workspace_secret_name: str
:keyword uri:
:paramtype uri: str
"""
super(MfeInternalSecretConfiguration, self).__init__(**kwargs)
self.workspace_secret_name = kwargs.get('workspace_secret_name', None)
self.uri = kwargs.get('uri', None)
class MfeInternalUriReference(msrest.serialization.Model):
"""MfeInternalUriReference.
:ivar file:
:vartype file: str
:ivar folder:
:vartype folder: str
"""
_attribute_map = {
'file': {'key': 'file', 'type': 'str'},
'folder': {'key': 'folder', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword file:
:paramtype file: str
:keyword folder:
:paramtype folder: str
"""
super(MfeInternalUriReference, self).__init__(**kwargs)
self.file = kwargs.get('file', None)
self.folder = kwargs.get('folder', None)
class MfeInternalV20211001ComponentJob(msrest.serialization.Model):
"""MfeInternalV20211001ComponentJob.
:ivar compute_id:
:vartype compute_id: str
:ivar component_id:
:vartype component_id: str
:ivar inputs: This is a dictionary.
:vartype inputs: dict[str, ~flow.models.JobInput]
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, ~flow.models.JobOutput]
:ivar overrides: Anything.
:vartype overrides: any
"""
_attribute_map = {
'compute_id': {'key': 'computeId', 'type': 'str'},
'component_id': {'key': 'componentId', 'type': 'str'},
'inputs': {'key': 'inputs', 'type': '{JobInput}'},
'outputs': {'key': 'outputs', 'type': '{JobOutput}'},
'overrides': {'key': 'overrides', 'type': 'object'},
}
def __init__(
self,
**kwargs
):
"""
:keyword compute_id:
:paramtype compute_id: str
:keyword component_id:
:paramtype component_id: str
:keyword inputs: This is a dictionary.
:paramtype inputs: dict[str, ~flow.models.JobInput]
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, ~flow.models.JobOutput]
:keyword overrides: Anything.
:paramtype overrides: any
"""
super(MfeInternalV20211001ComponentJob, self).__init__(**kwargs)
self.compute_id = kwargs.get('compute_id', None)
self.component_id = kwargs.get('component_id', None)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
self.overrides = kwargs.get('overrides', None)
class MinMaxParameterRule(msrest.serialization.Model):
"""MinMaxParameterRule.
:ivar min:
:vartype min: float
:ivar max:
:vartype max: float
"""
_attribute_map = {
'min': {'key': 'min', 'type': 'float'},
'max': {'key': 'max', 'type': 'float'},
}
def __init__(
self,
**kwargs
):
"""
:keyword min:
:paramtype min: float
:keyword max:
:paramtype max: float
"""
super(MinMaxParameterRule, self).__init__(**kwargs)
self.min = kwargs.get('min', None)
self.max = kwargs.get('max', None)
class MlcComputeInfo(msrest.serialization.Model):
"""MlcComputeInfo.
:ivar mlc_compute_type:
:vartype mlc_compute_type: str
"""
_attribute_map = {
'mlc_compute_type': {'key': 'mlcComputeType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword mlc_compute_type:
:paramtype mlc_compute_type: str
"""
super(MlcComputeInfo, self).__init__(**kwargs)
self.mlc_compute_type = kwargs.get('mlc_compute_type', None)
class ModelDto(msrest.serialization.Model):
"""ModelDto.
:ivar feed_name:
:vartype feed_name: str
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
:ivar relative_path:
:vartype relative_path: str
:ivar id:
:vartype id: str
:ivar version:
:vartype version: str
:ivar system_data:
:vartype system_data: ~flow.models.SystemData
:ivar arm_id:
:vartype arm_id: str
:ivar online_endpoint_yaml_str:
:vartype online_endpoint_yaml_str: str
"""
_attribute_map = {
'feed_name': {'key': 'feedName', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'arm_id': {'key': 'armId', 'type': 'str'},
'online_endpoint_yaml_str': {'key': 'onlineEndpointYamlStr', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword feed_name:
:paramtype feed_name: str
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
:keyword relative_path:
:paramtype relative_path: str
:keyword id:
:paramtype id: str
:keyword version:
:paramtype version: str
:keyword system_data:
:paramtype system_data: ~flow.models.SystemData
:keyword arm_id:
:paramtype arm_id: str
:keyword online_endpoint_yaml_str:
:paramtype online_endpoint_yaml_str: str
"""
super(ModelDto, self).__init__(**kwargs)
self.feed_name = kwargs.get('feed_name', None)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
self.relative_path = kwargs.get('relative_path', None)
self.id = kwargs.get('id', None)
self.version = kwargs.get('version', None)
self.system_data = kwargs.get('system_data', None)
self.arm_id = kwargs.get('arm_id', None)
self.online_endpoint_yaml_str = kwargs.get('online_endpoint_yaml_str', None)
class ModelManagementErrorResponse(msrest.serialization.Model):
"""ModelManagementErrorResponse.
:ivar code:
:vartype code: str
:ivar status_code:
:vartype status_code: int
:ivar message:
:vartype message: str
:ivar target:
:vartype target: str
:ivar details:
:vartype details: list[~flow.models.InnerErrorDetails]
:ivar correlation: Dictionary of :code:`<string>`.
:vartype correlation: dict[str, str]
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'status_code': {'key': 'statusCode', 'type': 'int'},
'message': {'key': 'message', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'details': {'key': 'details', 'type': '[InnerErrorDetails]'},
'correlation': {'key': 'correlation', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword code:
:paramtype code: str
:keyword status_code:
:paramtype status_code: int
:keyword message:
:paramtype message: str
:keyword target:
:paramtype target: str
:keyword details:
:paramtype details: list[~flow.models.InnerErrorDetails]
:keyword correlation: Dictionary of :code:`<string>`.
:paramtype correlation: dict[str, str]
"""
super(ModelManagementErrorResponse, self).__init__(**kwargs)
self.code = kwargs.get('code', None)
self.status_code = kwargs.get('status_code', None)
self.message = kwargs.get('message', None)
self.target = kwargs.get('target', None)
self.details = kwargs.get('details', None)
self.correlation = kwargs.get('correlation', None)
class ModifyPipelineJobScheduleDto(msrest.serialization.Model):
"""ModifyPipelineJobScheduleDto.
:ivar pipeline_job_name:
:vartype pipeline_job_name: str
:ivar pipeline_job_runtime_settings:
:vartype pipeline_job_runtime_settings: ~flow.models.PipelineJobRuntimeBasicSettings
:ivar display_name:
:vartype display_name: str
:ivar trigger_type: Possible values include: "Recurrence", "Cron".
:vartype trigger_type: str or ~flow.models.TriggerType
:ivar recurrence:
:vartype recurrence: ~flow.models.Recurrence
:ivar cron:
:vartype cron: ~flow.models.Cron
:ivar status: Possible values include: "Enabled", "Disabled".
:vartype status: str or ~flow.models.ScheduleStatus
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
"""
_attribute_map = {
'pipeline_job_name': {'key': 'pipelineJobName', 'type': 'str'},
'pipeline_job_runtime_settings': {'key': 'pipelineJobRuntimeSettings', 'type': 'PipelineJobRuntimeBasicSettings'},
'display_name': {'key': 'displayName', 'type': 'str'},
'trigger_type': {'key': 'triggerType', 'type': 'str'},
'recurrence': {'key': 'recurrence', 'type': 'Recurrence'},
'cron': {'key': 'cron', 'type': 'Cron'},
'status': {'key': 'status', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword pipeline_job_name:
:paramtype pipeline_job_name: str
:keyword pipeline_job_runtime_settings:
:paramtype pipeline_job_runtime_settings: ~flow.models.PipelineJobRuntimeBasicSettings
:keyword display_name:
:paramtype display_name: str
:keyword trigger_type: Possible values include: "Recurrence", "Cron".
:paramtype trigger_type: str or ~flow.models.TriggerType
:keyword recurrence:
:paramtype recurrence: ~flow.models.Recurrence
:keyword cron:
:paramtype cron: ~flow.models.Cron
:keyword status: Possible values include: "Enabled", "Disabled".
:paramtype status: str or ~flow.models.ScheduleStatus
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
"""
super(ModifyPipelineJobScheduleDto, self).__init__(**kwargs)
self.pipeline_job_name = kwargs.get('pipeline_job_name', None)
self.pipeline_job_runtime_settings = kwargs.get('pipeline_job_runtime_settings', None)
self.display_name = kwargs.get('display_name', None)
self.trigger_type = kwargs.get('trigger_type', None)
self.recurrence = kwargs.get('recurrence', None)
self.cron = kwargs.get('cron', None)
self.status = kwargs.get('status', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
class ModuleDto(msrest.serialization.Model):
"""ModuleDto.
:ivar namespace:
:vartype namespace: str
:ivar tags: A set of tags.
:vartype tags: list[str]
:ivar display_name:
:vartype display_name: str
:ivar dict_tags: Dictionary of :code:`<string>`.
:vartype dict_tags: dict[str, str]
:ivar module_version_id:
:vartype module_version_id: str
:ivar feed_name:
:vartype feed_name: str
:ivar registry_name:
:vartype registry_name: str
:ivar module_name:
:vartype module_name: str
:ivar module_version:
:vartype module_version: str
:ivar description:
:vartype description: str
:ivar owner:
:vartype owner: str
:ivar job_type:
:vartype job_type: str
:ivar default_version:
:vartype default_version: str
:ivar family_id:
:vartype family_id: str
:ivar help_document:
:vartype help_document: str
:ivar codegen_by:
:vartype codegen_by: str
:ivar arm_id:
:vartype arm_id: str
:ivar module_scope: Possible values include: "All", "Global", "Workspace", "Anonymous", "Step",
"Draft", "Feed", "Registry", "SystemAutoCreated".
:vartype module_scope: str or ~flow.models.ModuleScope
:ivar module_entity:
:vartype module_entity: ~flow.models.ModuleEntity
:ivar input_types:
:vartype input_types: list[str]
:ivar output_types:
:vartype output_types: list[str]
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
:ivar yaml_link:
:vartype yaml_link: str
:ivar yaml_link_with_commit_sha:
:vartype yaml_link_with_commit_sha: str
:ivar module_source_type: Possible values include: "Unknown", "Local", "GithubFile",
"GithubFolder", "DevopsArtifactsZip", "SerializedModuleInfo".
:vartype module_source_type: str or ~flow.models.ModuleSourceType
:ivar registered_by:
:vartype registered_by: str
:ivar versions:
:vartype versions: list[~flow.models.AzureMLModuleVersionDescriptor]
:ivar is_default_module_version:
:vartype is_default_module_version: bool
:ivar system_data:
:vartype system_data: ~flow.models.SystemData
:ivar system_meta:
:vartype system_meta: ~flow.models.SystemMeta
:ivar snapshot_id:
:vartype snapshot_id: str
:ivar entry:
:vartype entry: str
:ivar os_type:
:vartype os_type: str
:ivar require_gpu:
:vartype require_gpu: bool
:ivar module_python_interface:
:vartype module_python_interface: ~flow.models.ModulePythonInterface
:ivar environment_asset_id:
:vartype environment_asset_id: str
:ivar run_setting_parameters:
:vartype run_setting_parameters: list[~flow.models.RunSettingParameter]
:ivar supported_ui_input_data_delivery_modes: Dictionary of lists of
 :code:`<UIInputDataDeliveryMode>`.
:vartype supported_ui_input_data_delivery_modes: dict[str, list[str or
~flow.models.UIInputDataDeliveryMode]]
:ivar output_setting_specs: Dictionary of :code:`<OutputSettingSpec>`.
:vartype output_setting_specs: dict[str, ~flow.models.OutputSettingSpec]
:ivar yaml_str:
:vartype yaml_str: str
"""
_attribute_map = {
'namespace': {'key': 'namespace', 'type': 'str'},
'tags': {'key': 'tags', 'type': '[str]'},
'display_name': {'key': 'displayName', 'type': 'str'},
'dict_tags': {'key': 'dictTags', 'type': '{str}'},
'module_version_id': {'key': 'moduleVersionId', 'type': 'str'},
'feed_name': {'key': 'feedName', 'type': 'str'},
'registry_name': {'key': 'registryName', 'type': 'str'},
'module_name': {'key': 'moduleName', 'type': 'str'},
'module_version': {'key': 'moduleVersion', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'owner': {'key': 'owner', 'type': 'str'},
'job_type': {'key': 'jobType', 'type': 'str'},
'default_version': {'key': 'defaultVersion', 'type': 'str'},
'family_id': {'key': 'familyId', 'type': 'str'},
'help_document': {'key': 'helpDocument', 'type': 'str'},
'codegen_by': {'key': 'codegenBy', 'type': 'str'},
'arm_id': {'key': 'armId', 'type': 'str'},
'module_scope': {'key': 'moduleScope', 'type': 'str'},
'module_entity': {'key': 'moduleEntity', 'type': 'ModuleEntity'},
'input_types': {'key': 'inputTypes', 'type': '[str]'},
'output_types': {'key': 'outputTypes', 'type': '[str]'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
'yaml_link': {'key': 'yamlLink', 'type': 'str'},
'yaml_link_with_commit_sha': {'key': 'yamlLinkWithCommitSha', 'type': 'str'},
'module_source_type': {'key': 'moduleSourceType', 'type': 'str'},
'registered_by': {'key': 'registeredBy', 'type': 'str'},
'versions': {'key': 'versions', 'type': '[AzureMLModuleVersionDescriptor]'},
'is_default_module_version': {'key': 'isDefaultModuleVersion', 'type': 'bool'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'system_meta': {'key': 'systemMeta', 'type': 'SystemMeta'},
'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
'entry': {'key': 'entry', 'type': 'str'},
'os_type': {'key': 'osType', 'type': 'str'},
'require_gpu': {'key': 'requireGpu', 'type': 'bool'},
'module_python_interface': {'key': 'modulePythonInterface', 'type': 'ModulePythonInterface'},
'environment_asset_id': {'key': 'environmentAssetId', 'type': 'str'},
'run_setting_parameters': {'key': 'runSettingParameters', 'type': '[RunSettingParameter]'},
'supported_ui_input_data_delivery_modes': {'key': 'supportedUIInputDataDeliveryModes', 'type': '{[str]}'},
'output_setting_specs': {'key': 'outputSettingSpecs', 'type': '{OutputSettingSpec}'},
'yaml_str': {'key': 'yamlStr', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword namespace:
:paramtype namespace: str
:keyword tags: A set of tags.
:paramtype tags: list[str]
:keyword display_name:
:paramtype display_name: str
:keyword dict_tags: Dictionary of :code:`<string>`.
:paramtype dict_tags: dict[str, str]
:keyword module_version_id:
:paramtype module_version_id: str
:keyword feed_name:
:paramtype feed_name: str
:keyword registry_name:
:paramtype registry_name: str
:keyword module_name:
:paramtype module_name: str
:keyword module_version:
:paramtype module_version: str
:keyword description:
:paramtype description: str
:keyword owner:
:paramtype owner: str
:keyword job_type:
:paramtype job_type: str
:keyword default_version:
:paramtype default_version: str
:keyword family_id:
:paramtype family_id: str
:keyword help_document:
:paramtype help_document: str
:keyword codegen_by:
:paramtype codegen_by: str
:keyword arm_id:
:paramtype arm_id: str
:keyword module_scope: Possible values include: "All", "Global", "Workspace", "Anonymous",
"Step", "Draft", "Feed", "Registry", "SystemAutoCreated".
:paramtype module_scope: str or ~flow.models.ModuleScope
:keyword module_entity:
:paramtype module_entity: ~flow.models.ModuleEntity
:keyword input_types:
:paramtype input_types: list[str]
:keyword output_types:
:paramtype output_types: list[str]
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
:keyword yaml_link:
:paramtype yaml_link: str
:keyword yaml_link_with_commit_sha:
:paramtype yaml_link_with_commit_sha: str
:keyword module_source_type: Possible values include: "Unknown", "Local", "GithubFile",
"GithubFolder", "DevopsArtifactsZip", "SerializedModuleInfo".
:paramtype module_source_type: str or ~flow.models.ModuleSourceType
:keyword registered_by:
:paramtype registered_by: str
:keyword versions:
:paramtype versions: list[~flow.models.AzureMLModuleVersionDescriptor]
:keyword is_default_module_version:
:paramtype is_default_module_version: bool
:keyword system_data:
:paramtype system_data: ~flow.models.SystemData
:keyword system_meta:
:paramtype system_meta: ~flow.models.SystemMeta
:keyword snapshot_id:
:paramtype snapshot_id: str
:keyword entry:
:paramtype entry: str
:keyword os_type:
:paramtype os_type: str
:keyword require_gpu:
:paramtype require_gpu: bool
:keyword module_python_interface:
:paramtype module_python_interface: ~flow.models.ModulePythonInterface
:keyword environment_asset_id:
:paramtype environment_asset_id: str
:keyword run_setting_parameters:
:paramtype run_setting_parameters: list[~flow.models.RunSettingParameter]
:keyword supported_ui_input_data_delivery_modes: Dictionary of lists of
:code:`<UIInputDataDeliveryMode>`.
:paramtype supported_ui_input_data_delivery_modes: dict[str, list[str or
~flow.models.UIInputDataDeliveryMode]]
:keyword output_setting_specs: Dictionary of :code:`<OutputSettingSpec>`.
:paramtype output_setting_specs: dict[str, ~flow.models.OutputSettingSpec]
:keyword yaml_str:
:paramtype yaml_str: str
"""
super(ModuleDto, self).__init__(**kwargs)
self.namespace = kwargs.get('namespace', None)
self.tags = kwargs.get('tags', None)
self.display_name = kwargs.get('display_name', None)
self.dict_tags = kwargs.get('dict_tags', None)
self.module_version_id = kwargs.get('module_version_id', None)
self.feed_name = kwargs.get('feed_name', None)
self.registry_name = kwargs.get('registry_name', None)
self.module_name = kwargs.get('module_name', None)
self.module_version = kwargs.get('module_version', None)
self.description = kwargs.get('description', None)
self.owner = kwargs.get('owner', None)
self.job_type = kwargs.get('job_type', None)
self.default_version = kwargs.get('default_version', None)
self.family_id = kwargs.get('family_id', None)
self.help_document = kwargs.get('help_document', None)
self.codegen_by = kwargs.get('codegen_by', None)
self.arm_id = kwargs.get('arm_id', None)
self.module_scope = kwargs.get('module_scope', None)
self.module_entity = kwargs.get('module_entity', None)
self.input_types = kwargs.get('input_types', None)
self.output_types = kwargs.get('output_types', None)
self.entity_status = kwargs.get('entity_status', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
self.yaml_link = kwargs.get('yaml_link', None)
self.yaml_link_with_commit_sha = kwargs.get('yaml_link_with_commit_sha', None)
self.module_source_type = kwargs.get('module_source_type', None)
self.registered_by = kwargs.get('registered_by', None)
self.versions = kwargs.get('versions', None)
self.is_default_module_version = kwargs.get('is_default_module_version', None)
self.system_data = kwargs.get('system_data', None)
self.system_meta = kwargs.get('system_meta', None)
self.snapshot_id = kwargs.get('snapshot_id', None)
self.entry = kwargs.get('entry', None)
self.os_type = kwargs.get('os_type', None)
self.require_gpu = kwargs.get('require_gpu', None)
self.module_python_interface = kwargs.get('module_python_interface', None)
self.environment_asset_id = kwargs.get('environment_asset_id', None)
self.run_setting_parameters = kwargs.get('run_setting_parameters', None)
self.supported_ui_input_data_delivery_modes = kwargs.get('supported_ui_input_data_delivery_modes', None)
self.output_setting_specs = kwargs.get('output_setting_specs', None)
self.yaml_str = kwargs.get('yaml_str', None)
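# Illustrative usage sketch (editorial addition, not generated by AutoRest):
# shows the kwargs-based construction pattern shared by the models in this
# module and a round trip through msrest serialization. The field values
# below are assumptions chosen for demonstration only.
def _example_module_dto_roundtrip():
    dto = ModuleDto(
        module_name='train_step',
        module_version='0.0.1',
        entity_status='Active',  # enum fields also accept their string values
    )
    wire = dto.serialize()  # dict keyed by the REST wire names, e.g. 'moduleName'
    assert wire['moduleName'] == 'train_step'
    return ModuleDto.deserialize(wire)  # back to a ModuleDto instance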
class ModuleDtoWithErrors(msrest.serialization.Model):
"""ModuleDtoWithErrors.
:ivar version_id_to_module_dto: This is a dictionary.
:vartype version_id_to_module_dto: dict[str, ~flow.models.ModuleDto]
:ivar name_and_version_to_module_dto:
:vartype name_and_version_to_module_dto:
list[~flow.models.KeyValuePairComponentNameMetaInfoModuleDto]
:ivar version_id_to_error: This is a dictionary.
:vartype version_id_to_error: dict[str, ~flow.models.ErrorResponse]
:ivar name_and_version_to_error:
:vartype name_and_version_to_error:
list[~flow.models.KeyValuePairComponentNameMetaInfoErrorResponse]
"""
_attribute_map = {
'version_id_to_module_dto': {'key': 'versionIdToModuleDto', 'type': '{ModuleDto}'},
'name_and_version_to_module_dto': {'key': 'nameAndVersionToModuleDto', 'type': '[KeyValuePairComponentNameMetaInfoModuleDto]'},
'version_id_to_error': {'key': 'versionIdToError', 'type': '{ErrorResponse}'},
'name_and_version_to_error': {'key': 'nameAndVersionToError', 'type': '[KeyValuePairComponentNameMetaInfoErrorResponse]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword version_id_to_module_dto: This is a dictionary.
:paramtype version_id_to_module_dto: dict[str, ~flow.models.ModuleDto]
:keyword name_and_version_to_module_dto:
:paramtype name_and_version_to_module_dto:
list[~flow.models.KeyValuePairComponentNameMetaInfoModuleDto]
:keyword version_id_to_error: This is a dictionary.
:paramtype version_id_to_error: dict[str, ~flow.models.ErrorResponse]
:keyword name_and_version_to_error:
:paramtype name_and_version_to_error:
list[~flow.models.KeyValuePairComponentNameMetaInfoErrorResponse]
"""
super(ModuleDtoWithErrors, self).__init__(**kwargs)
self.version_id_to_module_dto = kwargs.get('version_id_to_module_dto', None)
self.name_and_version_to_module_dto = kwargs.get('name_and_version_to_module_dto', None)
self.version_id_to_error = kwargs.get('version_id_to_error', None)
self.name_and_version_to_error = kwargs.get('name_and_version_to_error', None)
class ModuleDtoWithValidateStatus(msrest.serialization.Model):
"""ModuleDtoWithValidateStatus.
:ivar existing_module_entity:
:vartype existing_module_entity: ~flow.models.ModuleEntity
:ivar status: Possible values include: "NewModule", "NewVersion", "Conflict", "ParseError",
"ProcessRequestError".
:vartype status: str or ~flow.models.ModuleInfoFromYamlStatusEnum
:ivar status_details:
:vartype status_details: str
:ivar error_details:
:vartype error_details: list[str]
:ivar serialized_module_info:
:vartype serialized_module_info: str
:ivar namespace:
:vartype namespace: str
:ivar tags: A set of tags.
:vartype tags: list[str]
:ivar display_name:
:vartype display_name: str
:ivar dict_tags: Dictionary of :code:`<string>`.
:vartype dict_tags: dict[str, str]
:ivar module_version_id:
:vartype module_version_id: str
:ivar feed_name:
:vartype feed_name: str
:ivar registry_name:
:vartype registry_name: str
:ivar module_name:
:vartype module_name: str
:ivar module_version:
:vartype module_version: str
:ivar description:
:vartype description: str
:ivar owner:
:vartype owner: str
:ivar job_type:
:vartype job_type: str
:ivar default_version:
:vartype default_version: str
:ivar family_id:
:vartype family_id: str
:ivar help_document:
:vartype help_document: str
:ivar codegen_by:
:vartype codegen_by: str
:ivar arm_id:
:vartype arm_id: str
:ivar module_scope: Possible values include: "All", "Global", "Workspace", "Anonymous", "Step",
"Draft", "Feed", "Registry", "SystemAutoCreated".
:vartype module_scope: str or ~flow.models.ModuleScope
:ivar module_entity:
:vartype module_entity: ~flow.models.ModuleEntity
:ivar input_types:
:vartype input_types: list[str]
:ivar output_types:
:vartype output_types: list[str]
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
:ivar yaml_link:
:vartype yaml_link: str
:ivar yaml_link_with_commit_sha:
:vartype yaml_link_with_commit_sha: str
:ivar module_source_type: Possible values include: "Unknown", "Local", "GithubFile",
"GithubFolder", "DevopsArtifactsZip", "SerializedModuleInfo".
:vartype module_source_type: str or ~flow.models.ModuleSourceType
:ivar registered_by:
:vartype registered_by: str
:ivar versions:
:vartype versions: list[~flow.models.AzureMLModuleVersionDescriptor]
:ivar is_default_module_version:
:vartype is_default_module_version: bool
:ivar system_data:
:vartype system_data: ~flow.models.SystemData
:ivar system_meta:
:vartype system_meta: ~flow.models.SystemMeta
:ivar snapshot_id:
:vartype snapshot_id: str
:ivar entry:
:vartype entry: str
:ivar os_type:
:vartype os_type: str
:ivar require_gpu:
:vartype require_gpu: bool
:ivar module_python_interface:
:vartype module_python_interface: ~flow.models.ModulePythonInterface
:ivar environment_asset_id:
:vartype environment_asset_id: str
:ivar run_setting_parameters:
:vartype run_setting_parameters: list[~flow.models.RunSettingParameter]
:ivar supported_ui_input_data_delivery_modes: Dictionary of lists of
:code:`<UIInputDataDeliveryMode>`.
:vartype supported_ui_input_data_delivery_modes: dict[str, list[str or
~flow.models.UIInputDataDeliveryMode]]
:ivar output_setting_specs: Dictionary of :code:`<OutputSettingSpec>`.
:vartype output_setting_specs: dict[str, ~flow.models.OutputSettingSpec]
:ivar yaml_str:
:vartype yaml_str: str
"""
_attribute_map = {
'existing_module_entity': {'key': 'existingModuleEntity', 'type': 'ModuleEntity'},
'status': {'key': 'status', 'type': 'str'},
'status_details': {'key': 'statusDetails', 'type': 'str'},
'error_details': {'key': 'errorDetails', 'type': '[str]'},
'serialized_module_info': {'key': 'serializedModuleInfo', 'type': 'str'},
'namespace': {'key': 'namespace', 'type': 'str'},
'tags': {'key': 'tags', 'type': '[str]'},
'display_name': {'key': 'displayName', 'type': 'str'},
'dict_tags': {'key': 'dictTags', 'type': '{str}'},
'module_version_id': {'key': 'moduleVersionId', 'type': 'str'},
'feed_name': {'key': 'feedName', 'type': 'str'},
'registry_name': {'key': 'registryName', 'type': 'str'},
'module_name': {'key': 'moduleName', 'type': 'str'},
'module_version': {'key': 'moduleVersion', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'owner': {'key': 'owner', 'type': 'str'},
'job_type': {'key': 'jobType', 'type': 'str'},
'default_version': {'key': 'defaultVersion', 'type': 'str'},
'family_id': {'key': 'familyId', 'type': 'str'},
'help_document': {'key': 'helpDocument', 'type': 'str'},
'codegen_by': {'key': 'codegenBy', 'type': 'str'},
'arm_id': {'key': 'armId', 'type': 'str'},
'module_scope': {'key': 'moduleScope', 'type': 'str'},
'module_entity': {'key': 'moduleEntity', 'type': 'ModuleEntity'},
'input_types': {'key': 'inputTypes', 'type': '[str]'},
'output_types': {'key': 'outputTypes', 'type': '[str]'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
'yaml_link': {'key': 'yamlLink', 'type': 'str'},
'yaml_link_with_commit_sha': {'key': 'yamlLinkWithCommitSha', 'type': 'str'},
'module_source_type': {'key': 'moduleSourceType', 'type': 'str'},
'registered_by': {'key': 'registeredBy', 'type': 'str'},
'versions': {'key': 'versions', 'type': '[AzureMLModuleVersionDescriptor]'},
'is_default_module_version': {'key': 'isDefaultModuleVersion', 'type': 'bool'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'system_meta': {'key': 'systemMeta', 'type': 'SystemMeta'},
'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
'entry': {'key': 'entry', 'type': 'str'},
'os_type': {'key': 'osType', 'type': 'str'},
'require_gpu': {'key': 'requireGpu', 'type': 'bool'},
'module_python_interface': {'key': 'modulePythonInterface', 'type': 'ModulePythonInterface'},
'environment_asset_id': {'key': 'environmentAssetId', 'type': 'str'},
'run_setting_parameters': {'key': 'runSettingParameters', 'type': '[RunSettingParameter]'},
'supported_ui_input_data_delivery_modes': {'key': 'supportedUIInputDataDeliveryModes', 'type': '{[str]}'},
'output_setting_specs': {'key': 'outputSettingSpecs', 'type': '{OutputSettingSpec}'},
'yaml_str': {'key': 'yamlStr', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword existing_module_entity:
:paramtype existing_module_entity: ~flow.models.ModuleEntity
:keyword status: Possible values include: "NewModule", "NewVersion", "Conflict", "ParseError",
"ProcessRequestError".
:paramtype status: str or ~flow.models.ModuleInfoFromYamlStatusEnum
:keyword status_details:
:paramtype status_details: str
:keyword error_details:
:paramtype error_details: list[str]
:keyword serialized_module_info:
:paramtype serialized_module_info: str
:keyword namespace:
:paramtype namespace: str
:keyword tags: A set of tags.
:paramtype tags: list[str]
:keyword display_name:
:paramtype display_name: str
:keyword dict_tags: Dictionary of :code:`<string>`.
:paramtype dict_tags: dict[str, str]
:keyword module_version_id:
:paramtype module_version_id: str
:keyword feed_name:
:paramtype feed_name: str
:keyword registry_name:
:paramtype registry_name: str
:keyword module_name:
:paramtype module_name: str
:keyword module_version:
:paramtype module_version: str
:keyword description:
:paramtype description: str
:keyword owner:
:paramtype owner: str
:keyword job_type:
:paramtype job_type: str
:keyword default_version:
:paramtype default_version: str
:keyword family_id:
:paramtype family_id: str
:keyword help_document:
:paramtype help_document: str
:keyword codegen_by:
:paramtype codegen_by: str
:keyword arm_id:
:paramtype arm_id: str
:keyword module_scope: Possible values include: "All", "Global", "Workspace", "Anonymous",
"Step", "Draft", "Feed", "Registry", "SystemAutoCreated".
:paramtype module_scope: str or ~flow.models.ModuleScope
:keyword module_entity:
:paramtype module_entity: ~flow.models.ModuleEntity
:keyword input_types:
:paramtype input_types: list[str]
:keyword output_types:
:paramtype output_types: list[str]
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
:keyword yaml_link:
:paramtype yaml_link: str
:keyword yaml_link_with_commit_sha:
:paramtype yaml_link_with_commit_sha: str
:keyword module_source_type: Possible values include: "Unknown", "Local", "GithubFile",
"GithubFolder", "DevopsArtifactsZip", "SerializedModuleInfo".
:paramtype module_source_type: str or ~flow.models.ModuleSourceType
:keyword registered_by:
:paramtype registered_by: str
:keyword versions:
:paramtype versions: list[~flow.models.AzureMLModuleVersionDescriptor]
:keyword is_default_module_version:
:paramtype is_default_module_version: bool
:keyword system_data:
:paramtype system_data: ~flow.models.SystemData
:keyword system_meta:
:paramtype system_meta: ~flow.models.SystemMeta
:keyword snapshot_id:
:paramtype snapshot_id: str
:keyword entry:
:paramtype entry: str
:keyword os_type:
:paramtype os_type: str
:keyword require_gpu:
:paramtype require_gpu: bool
:keyword module_python_interface:
:paramtype module_python_interface: ~flow.models.ModulePythonInterface
:keyword environment_asset_id:
:paramtype environment_asset_id: str
:keyword run_setting_parameters:
:paramtype run_setting_parameters: list[~flow.models.RunSettingParameter]
:keyword supported_ui_input_data_delivery_modes: Dictionary of lists of
:code:`<UIInputDataDeliveryMode>`.
:paramtype supported_ui_input_data_delivery_modes: dict[str, list[str or
~flow.models.UIInputDataDeliveryMode]]
:keyword output_setting_specs: Dictionary of :code:`<OutputSettingSpec>`.
:paramtype output_setting_specs: dict[str, ~flow.models.OutputSettingSpec]
:keyword yaml_str:
:paramtype yaml_str: str
"""
super(ModuleDtoWithValidateStatus, self).__init__(**kwargs)
self.existing_module_entity = kwargs.get('existing_module_entity', None)
self.status = kwargs.get('status', None)
self.status_details = kwargs.get('status_details', None)
self.error_details = kwargs.get('error_details', None)
self.serialized_module_info = kwargs.get('serialized_module_info', None)
self.namespace = kwargs.get('namespace', None)
self.tags = kwargs.get('tags', None)
self.display_name = kwargs.get('display_name', None)
self.dict_tags = kwargs.get('dict_tags', None)
self.module_version_id = kwargs.get('module_version_id', None)
self.feed_name = kwargs.get('feed_name', None)
self.registry_name = kwargs.get('registry_name', None)
self.module_name = kwargs.get('module_name', None)
self.module_version = kwargs.get('module_version', None)
self.description = kwargs.get('description', None)
self.owner = kwargs.get('owner', None)
self.job_type = kwargs.get('job_type', None)
self.default_version = kwargs.get('default_version', None)
self.family_id = kwargs.get('family_id', None)
self.help_document = kwargs.get('help_document', None)
self.codegen_by = kwargs.get('codegen_by', None)
self.arm_id = kwargs.get('arm_id', None)
self.module_scope = kwargs.get('module_scope', None)
self.module_entity = kwargs.get('module_entity', None)
self.input_types = kwargs.get('input_types', None)
self.output_types = kwargs.get('output_types', None)
self.entity_status = kwargs.get('entity_status', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
self.yaml_link = kwargs.get('yaml_link', None)
self.yaml_link_with_commit_sha = kwargs.get('yaml_link_with_commit_sha', None)
self.module_source_type = kwargs.get('module_source_type', None)
self.registered_by = kwargs.get('registered_by', None)
self.versions = kwargs.get('versions', None)
self.is_default_module_version = kwargs.get('is_default_module_version', None)
self.system_data = kwargs.get('system_data', None)
self.system_meta = kwargs.get('system_meta', None)
self.snapshot_id = kwargs.get('snapshot_id', None)
self.entry = kwargs.get('entry', None)
self.os_type = kwargs.get('os_type', None)
self.require_gpu = kwargs.get('require_gpu', None)
self.module_python_interface = kwargs.get('module_python_interface', None)
self.environment_asset_id = kwargs.get('environment_asset_id', None)
self.run_setting_parameters = kwargs.get('run_setting_parameters', None)
self.supported_ui_input_data_delivery_modes = kwargs.get('supported_ui_input_data_delivery_modes', None)
self.output_setting_specs = kwargs.get('output_setting_specs', None)
self.yaml_str = kwargs.get('yaml_str', None)
class ModuleEntity(msrest.serialization.Model):
"""ModuleEntity.
:ivar display_name:
:vartype display_name: str
:ivar module_execution_type:
:vartype module_execution_type: str
:ivar module_type: Possible values include: "None", "BatchInferencing".
:vartype module_type: str or ~flow.models.ModuleType
:ivar module_type_version:
:vartype module_type_version: str
:ivar upload_state: Possible values include: "Uploading", "Completed", "Canceled", "Failed".
:vartype upload_state: str or ~flow.models.UploadState
:ivar is_deterministic:
:vartype is_deterministic: bool
:ivar structured_interface:
:vartype structured_interface: ~flow.models.StructuredInterface
:ivar data_location:
:vartype data_location: ~flow.models.DataLocation
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar identifier_hash_v2:
:vartype identifier_hash_v2: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar created_by:
:vartype created_by: ~flow.models.CreatedBy
:ivar last_updated_by:
:vartype last_updated_by: ~flow.models.CreatedBy
:ivar runconfig:
:vartype runconfig: str
:ivar cloud_settings:
:vartype cloud_settings: ~flow.models.CloudSettings
:ivar category:
:vartype category: str
:ivar step_type:
:vartype step_type: str
:ivar stage:
:vartype stage: str
:ivar name:
:vartype name: str
:ivar hash:
:vartype hash: str
:ivar description:
:vartype description: str
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'display_name': {'key': 'displayName', 'type': 'str'},
'module_execution_type': {'key': 'moduleExecutionType', 'type': 'str'},
'module_type': {'key': 'moduleType', 'type': 'str'},
'module_type_version': {'key': 'moduleTypeVersion', 'type': 'str'},
'upload_state': {'key': 'uploadState', 'type': 'str'},
'is_deterministic': {'key': 'isDeterministic', 'type': 'bool'},
'structured_interface': {'key': 'structuredInterface', 'type': 'StructuredInterface'},
'data_location': {'key': 'dataLocation', 'type': 'DataLocation'},
'identifier_hash': {'key': 'identifierHash', 'type': 'str'},
'identifier_hash_v2': {'key': 'identifierHashV2', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'created_by': {'key': 'createdBy', 'type': 'CreatedBy'},
'last_updated_by': {'key': 'lastUpdatedBy', 'type': 'CreatedBy'},
'runconfig': {'key': 'runconfig', 'type': 'str'},
'cloud_settings': {'key': 'cloudSettings', 'type': 'CloudSettings'},
'category': {'key': 'category', 'type': 'str'},
'step_type': {'key': 'stepType', 'type': 'str'},
'stage': {'key': 'stage', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'hash': {'key': 'hash', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword display_name:
:paramtype display_name: str
:keyword module_execution_type:
:paramtype module_execution_type: str
:keyword module_type: Possible values include: "None", "BatchInferencing".
:paramtype module_type: str or ~flow.models.ModuleType
:keyword module_type_version:
:paramtype module_type_version: str
:keyword upload_state: Possible values include: "Uploading", "Completed", "Canceled", "Failed".
:paramtype upload_state: str or ~flow.models.UploadState
:keyword is_deterministic:
:paramtype is_deterministic: bool
:keyword structured_interface:
:paramtype structured_interface: ~flow.models.StructuredInterface
:keyword data_location:
:paramtype data_location: ~flow.models.DataLocation
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword identifier_hash_v2:
:paramtype identifier_hash_v2: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword created_by:
:paramtype created_by: ~flow.models.CreatedBy
:keyword last_updated_by:
:paramtype last_updated_by: ~flow.models.CreatedBy
:keyword runconfig:
:paramtype runconfig: str
:keyword cloud_settings:
:paramtype cloud_settings: ~flow.models.CloudSettings
:keyword category:
:paramtype category: str
:keyword step_type:
:paramtype step_type: str
:keyword stage:
:paramtype stage: str
:keyword name:
:paramtype name: str
:keyword hash:
:paramtype hash: str
:keyword description:
:paramtype description: str
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(ModuleEntity, self).__init__(**kwargs)
self.display_name = kwargs.get('display_name', None)
self.module_execution_type = kwargs.get('module_execution_type', None)
self.module_type = kwargs.get('module_type', None)
self.module_type_version = kwargs.get('module_type_version', None)
self.upload_state = kwargs.get('upload_state', None)
self.is_deterministic = kwargs.get('is_deterministic', None)
self.structured_interface = kwargs.get('structured_interface', None)
self.data_location = kwargs.get('data_location', None)
self.identifier_hash = kwargs.get('identifier_hash', None)
self.identifier_hash_v2 = kwargs.get('identifier_hash_v2', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
self.created_by = kwargs.get('created_by', None)
self.last_updated_by = kwargs.get('last_updated_by', None)
self.runconfig = kwargs.get('runconfig', None)
self.cloud_settings = kwargs.get('cloud_settings', None)
self.category = kwargs.get('category', None)
self.step_type = kwargs.get('step_type', None)
self.stage = kwargs.get('stage', None)
self.name = kwargs.get('name', None)
self.hash = kwargs.get('hash', None)
self.description = kwargs.get('description', None)
self.entity_status = kwargs.get('entity_status', None)
self.id = kwargs.get('id', None)
self.etag = kwargs.get('etag', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
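# Illustrative sketch (editorial addition, not generated): ModuleEntity maps
# its date fields with the 'iso-8601' type, so msrest renders datetime objects
# as ISO-8601 strings on the wire. The values below are assumptions for
# demonstration only.
def _example_module_entity_dates():
    import datetime
    entity = ModuleEntity(
        name='train_step',
        created_date=datetime.datetime(2024, 1, 1, tzinfo=datetime.timezone.utc),
    )
    wire = entity.serialize()
    # 'createdDate' comes back as an ISO-8601 timestamp string.
    return wire['createdDate']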
class ModulePythonInterface(msrest.serialization.Model):
"""ModulePythonInterface.
:ivar inputs:
:vartype inputs: list[~flow.models.PythonInterfaceMapping]
:ivar outputs:
:vartype outputs: list[~flow.models.PythonInterfaceMapping]
:ivar parameters:
:vartype parameters: list[~flow.models.PythonInterfaceMapping]
"""
_attribute_map = {
'inputs': {'key': 'inputs', 'type': '[PythonInterfaceMapping]'},
'outputs': {'key': 'outputs', 'type': '[PythonInterfaceMapping]'},
'parameters': {'key': 'parameters', 'type': '[PythonInterfaceMapping]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword inputs:
:paramtype inputs: list[~flow.models.PythonInterfaceMapping]
:keyword outputs:
:paramtype outputs: list[~flow.models.PythonInterfaceMapping]
:keyword parameters:
:paramtype parameters: list[~flow.models.PythonInterfaceMapping]
"""
super(ModulePythonInterface, self).__init__(**kwargs)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
self.parameters = kwargs.get('parameters', None)
class MpiConfiguration(msrest.serialization.Model):
"""MpiConfiguration.
:ivar process_count_per_node:
:vartype process_count_per_node: int
"""
_attribute_map = {
'process_count_per_node': {'key': 'processCountPerNode', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword process_count_per_node:
:paramtype process_count_per_node: int
"""
super(MpiConfiguration, self).__init__(**kwargs)
self.process_count_per_node = kwargs.get('process_count_per_node', None)
class NCrossValidations(msrest.serialization.Model):
"""NCrossValidations.
:ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.NCrossValidationMode
:ivar value:
:vartype value: int
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'value': {'key': 'value', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom".
:paramtype mode: str or ~flow.models.NCrossValidationMode
:keyword value:
:paramtype value: int
"""
super(NCrossValidations, self).__init__(**kwargs)
self.mode = kwargs.get('mode', None)
self.value = kwargs.get('value', None)
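# Illustrative sketch (editorial addition, not generated): the mode/value pair
# mirrors the "Auto"/"Custom" split documented above -- 'value' is only
# meaningful when mode is "Custom". The fold count is an assumption for
# demonstration.
def _example_n_cross_validations():
    auto = NCrossValidations(mode='Auto')
    custom = NCrossValidations(mode='Custom', value=5)
    return auto.serialize(), custom.serialize()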
class Node(msrest.serialization.Model):
"""Node.
:ivar name:
:vartype name: str
:ivar type: Possible values include: "llm", "python", "action", "prompt", "custom_llm",
"csharp", "typescript".
:vartype type: str or ~flow.models.ToolType
:ivar source:
:vartype source: ~flow.models.NodeSource
:ivar inputs: Dictionary of :code:`<any>`.
:vartype inputs: dict[str, any]
:ivar tool:
:vartype tool: str
:ivar reduce:
:vartype reduce: bool
:ivar activate:
:vartype activate: ~flow.models.Activate
:ivar use_variants:
:vartype use_variants: bool
:ivar comment:
:vartype comment: str
:ivar api:
:vartype api: str
:ivar provider:
:vartype provider: str
:ivar connection:
:vartype connection: str
:ivar module:
:vartype module: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'source': {'key': 'source', 'type': 'NodeSource'},
'inputs': {'key': 'inputs', 'type': '{object}'},
'tool': {'key': 'tool', 'type': 'str'},
'reduce': {'key': 'reduce', 'type': 'bool'},
'activate': {'key': 'activate', 'type': 'Activate'},
'use_variants': {'key': 'use_variants', 'type': 'bool'},
'comment': {'key': 'comment', 'type': 'str'},
'api': {'key': 'api', 'type': 'str'},
'provider': {'key': 'provider', 'type': 'str'},
'connection': {'key': 'connection', 'type': 'str'},
'module': {'key': 'module', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type: Possible values include: "llm", "python", "action", "prompt", "custom_llm",
"csharp", "typescript".
:paramtype type: str or ~flow.models.ToolType
:keyword source:
:paramtype source: ~flow.models.NodeSource
:keyword inputs: Dictionary of :code:`<any>`.
:paramtype inputs: dict[str, any]
:keyword tool:
:paramtype tool: str
:keyword reduce:
:paramtype reduce: bool
:keyword activate:
:paramtype activate: ~flow.models.Activate
:keyword use_variants:
:paramtype use_variants: bool
:keyword comment:
:paramtype comment: str
:keyword api:
:paramtype api: str
:keyword provider:
:paramtype provider: str
:keyword connection:
:paramtype connection: str
:keyword module:
:paramtype module: str
"""
super(Node, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = kwargs.get('type', None)
self.source = kwargs.get('source', None)
self.inputs = kwargs.get('inputs', None)
self.tool = kwargs.get('tool', None)
self.reduce = kwargs.get('reduce', None)
self.activate = kwargs.get('activate', None)
self.use_variants = kwargs.get('use_variants', None)
self.comment = kwargs.get('comment', None)
self.api = kwargs.get('api', None)
self.provider = kwargs.get('provider', None)
self.connection = kwargs.get('connection', None)
self.module = kwargs.get('module', None)
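# Illustrative sketch (editorial addition, not generated): composes a Node
# with a nested NodeSource and an Activate condition, following the attribute
# maps above. The node name, template path, input binding, and connection
# name are all assumptions for demonstration only.
def _example_flow_node():
    node = Node(
        name='summarize',
        type='llm',
        source=NodeSource(type='code', path='summarize.jinja2'),
        inputs={'text': '${inputs.text}'},
        activate=Activate(when='${inputs.enabled}', is_property=True),
        connection='my_aoai_connection',
    )
    # Nested models serialize recursively under their wire keys
    # (e.g. 'source', 'activate' with its 'is' key).
    return node.serialize()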
class NodeInputPort(msrest.serialization.Model):
"""NodeInputPort.
:ivar name:
:vartype name: str
:ivar documentation:
:vartype documentation: str
:ivar data_types_ids:
:vartype data_types_ids: list[str]
:ivar is_optional:
:vartype is_optional: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'documentation': {'key': 'documentation', 'type': 'str'},
'data_types_ids': {'key': 'dataTypesIds', 'type': '[str]'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword documentation:
:paramtype documentation: str
:keyword data_types_ids:
:paramtype data_types_ids: list[str]
:keyword is_optional:
:paramtype is_optional: bool
"""
super(NodeInputPort, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.documentation = kwargs.get('documentation', None)
self.data_types_ids = kwargs.get('data_types_ids', None)
self.is_optional = kwargs.get('is_optional', None)
class NodeLayout(msrest.serialization.Model):
"""NodeLayout.
:ivar x:
:vartype x: float
:ivar y:
:vartype y: float
:ivar width:
:vartype width: float
:ivar height:
:vartype height: float
:ivar extended_data:
:vartype extended_data: str
"""
_attribute_map = {
'x': {'key': 'x', 'type': 'float'},
'y': {'key': 'y', 'type': 'float'},
'width': {'key': 'width', 'type': 'float'},
'height': {'key': 'height', 'type': 'float'},
'extended_data': {'key': 'extendedData', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword x:
:paramtype x: float
:keyword y:
:paramtype y: float
:keyword width:
:paramtype width: float
:keyword height:
:paramtype height: float
:keyword extended_data:
:paramtype extended_data: str
"""
super(NodeLayout, self).__init__(**kwargs)
self.x = kwargs.get('x', None)
self.y = kwargs.get('y', None)
self.width = kwargs.get('width', None)
self.height = kwargs.get('height', None)
self.extended_data = kwargs.get('extended_data', None)
class NodeOutputPort(msrest.serialization.Model):
"""NodeOutputPort.
:ivar name:
:vartype name: str
:ivar documentation:
:vartype documentation: str
:ivar data_type_id:
:vartype data_type_id: str
:ivar pass_through_input_name:
:vartype pass_through_input_name: str
:ivar early_available:
:vartype early_available: bool
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'documentation': {'key': 'documentation', 'type': 'str'},
'data_type_id': {'key': 'dataTypeId', 'type': 'str'},
'pass_through_input_name': {'key': 'passThroughInputName', 'type': 'str'},
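# NOTE (editorial): the wire key below is Pascal-cased ('EarlyAvailable'),
# unlike the camelCase keys around it; this presumably mirrors the service
# contract and is not a typo.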
'early_available': {'key': 'EarlyAvailable', 'type': 'bool'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword documentation:
:paramtype documentation: str
:keyword data_type_id:
:paramtype data_type_id: str
:keyword pass_through_input_name:
:paramtype pass_through_input_name: str
:keyword early_available:
:paramtype early_available: bool
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
"""
super(NodeOutputPort, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.documentation = kwargs.get('documentation', None)
self.data_type_id = kwargs.get('data_type_id', None)
self.pass_through_input_name = kwargs.get('pass_through_input_name', None)
self.early_available = kwargs.get('early_available', None)
self.data_store_mode = kwargs.get('data_store_mode', None)
class NodePortInterface(msrest.serialization.Model):
"""NodePortInterface.
:ivar inputs:
:vartype inputs: list[~flow.models.NodeInputPort]
:ivar outputs:
:vartype outputs: list[~flow.models.NodeOutputPort]
:ivar control_outputs:
:vartype control_outputs: list[~flow.models.ControlOutput]
"""
_attribute_map = {
'inputs': {'key': 'inputs', 'type': '[NodeInputPort]'},
'outputs': {'key': 'outputs', 'type': '[NodeOutputPort]'},
'control_outputs': {'key': 'controlOutputs', 'type': '[ControlOutput]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword inputs:
:paramtype inputs: list[~flow.models.NodeInputPort]
:keyword outputs:
:paramtype outputs: list[~flow.models.NodeOutputPort]
:keyword control_outputs:
:paramtype control_outputs: list[~flow.models.ControlOutput]
"""
super(NodePortInterface, self).__init__(**kwargs)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
self.control_outputs = kwargs.get('control_outputs', None)
class Nodes(msrest.serialization.Model):
"""Nodes.
All required parameters must be populated in order to send to Azure.
:ivar nodes_value_type: Required. Possible values include: "All", "Custom".
:vartype nodes_value_type: str or ~flow.models.NodesValueType
:ivar values:
:vartype values: list[int]
"""
_validation = {
'nodes_value_type': {'required': True},
}
_attribute_map = {
'nodes_value_type': {'key': 'nodes_value_type', 'type': 'str'},
'values': {'key': 'values', 'type': '[int]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword nodes_value_type: Required. Possible values include: "All", "Custom".
:paramtype nodes_value_type: str or ~flow.models.NodesValueType
:keyword values:
:paramtype values: list[int]
"""
super(Nodes, self).__init__(**kwargs)
self.nodes_value_type = kwargs['nodes_value_type']
self.values = kwargs.get('values', None)
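# Illustrative sketch (editorial addition, not generated): 'nodes_value_type'
# is the one required field in this section, so __init__ reads it with
# kwargs['nodes_value_type'] and raises KeyError when it is omitted. The node
# indices are assumptions for demonstration.
def _example_nodes_required_field():
    all_nodes = Nodes(nodes_value_type='All')
    some_nodes = Nodes(nodes_value_type='Custom', values=[0, 2])
    try:
        Nodes()
    except KeyError:
        pass  # required parameter missing
    return all_nodes, some_nodes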
class NodeSource(msrest.serialization.Model):
"""NodeSource.
:ivar type:
:vartype type: str
:ivar tool:
:vartype tool: str
:ivar path:
:vartype path: str
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'tool': {'key': 'tool', 'type': 'str'},
'path': {'key': 'path', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type:
:paramtype type: str
:keyword tool:
:paramtype tool: str
:keyword path:
:paramtype path: str
"""
super(NodeSource, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.tool = kwargs.get('tool', None)
self.path = kwargs.get('path', None)
class NodeTelemetryMetaInfo(msrest.serialization.Model):
"""NodeTelemetryMetaInfo.
:ivar pipeline_run_id:
:vartype pipeline_run_id: str
:ivar node_id:
:vartype node_id: str
:ivar version_id:
:vartype version_id: str
:ivar node_type:
:vartype node_type: str
:ivar node_source:
:vartype node_source: str
:ivar is_anonymous:
:vartype is_anonymous: bool
:ivar is_pipeline_component:
:vartype is_pipeline_component: bool
"""
_attribute_map = {
'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'},
'node_id': {'key': 'nodeId', 'type': 'str'},
'version_id': {'key': 'versionId', 'type': 'str'},
'node_type': {'key': 'nodeType', 'type': 'str'},
'node_source': {'key': 'nodeSource', 'type': 'str'},
'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
'is_pipeline_component': {'key': 'isPipelineComponent', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword pipeline_run_id:
:paramtype pipeline_run_id: str
:keyword node_id:
:paramtype node_id: str
:keyword version_id:
:paramtype version_id: str
:keyword node_type:
:paramtype node_type: str
:keyword node_source:
:paramtype node_source: str
:keyword is_anonymous:
:paramtype is_anonymous: bool
:keyword is_pipeline_component:
:paramtype is_pipeline_component: bool
"""
super(NodeTelemetryMetaInfo, self).__init__(**kwargs)
self.pipeline_run_id = kwargs.get('pipeline_run_id', None)
self.node_id = kwargs.get('node_id', None)
self.version_id = kwargs.get('version_id', None)
self.node_type = kwargs.get('node_type', None)
self.node_source = kwargs.get('node_source', None)
self.is_anonymous = kwargs.get('is_anonymous', None)
self.is_pipeline_component = kwargs.get('is_pipeline_component', None)
class NodeVariant(msrest.serialization.Model):
"""NodeVariant.
:ivar variants: This is a dictionary.
:vartype variants: dict[str, ~flow.models.VariantNode]
:ivar default_variant_id:
:vartype default_variant_id: str
"""
_attribute_map = {
'variants': {'key': 'variants', 'type': '{VariantNode}'},
'default_variant_id': {'key': 'defaultVariantId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword variants: This is a dictionary.
:paramtype variants: dict[str, ~flow.models.VariantNode]
:keyword default_variant_id:
:paramtype default_variant_id: str
"""
super(NodeVariant, self).__init__(**kwargs)
self.variants = kwargs.get('variants', None)
self.default_variant_id = kwargs.get('default_variant_id', None)
class NoteBookTaskDto(msrest.serialization.Model):
"""NoteBookTaskDto.
:ivar notebook_path:
:vartype notebook_path: str
:ivar base_parameters: Dictionary of :code:`<string>`.
:vartype base_parameters: dict[str, str]
"""
_attribute_map = {
'notebook_path': {'key': 'notebook_path', 'type': 'str'},
'base_parameters': {'key': 'base_parameters', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword notebook_path:
:paramtype notebook_path: str
:keyword base_parameters: Dictionary of :code:`<string>`.
:paramtype base_parameters: dict[str, str]
"""
super(NoteBookTaskDto, self).__init__(**kwargs)
self.notebook_path = kwargs.get('notebook_path', None)
self.base_parameters = kwargs.get('base_parameters', None)
class NotificationSetting(msrest.serialization.Model):
"""NotificationSetting.
:ivar emails:
:vartype emails: list[str]
:ivar email_on:
:vartype email_on: list[str or ~flow.models.EmailNotificationEnableType]
:ivar webhooks: Dictionary of :code:`<Webhook>`.
:vartype webhooks: dict[str, ~flow.models.Webhook]
"""
_attribute_map = {
'emails': {'key': 'emails', 'type': '[str]'},
'email_on': {'key': 'emailOn', 'type': '[str]'},
'webhooks': {'key': 'webhooks', 'type': '{Webhook}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword emails:
:paramtype emails: list[str]
:keyword email_on:
:paramtype email_on: list[str or ~flow.models.EmailNotificationEnableType]
:keyword webhooks: Dictionary of :code:`<Webhook>`.
:paramtype webhooks: dict[str, ~flow.models.Webhook]
"""
super(NotificationSetting, self).__init__(**kwargs)
self.emails = kwargs.get('emails', None)
self.email_on = kwargs.get('email_on', None)
self.webhooks = kwargs.get('webhooks', None)
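# Illustrative sketch (editorial addition, not generated): email_on accepts
# the enum's string values directly, as with the other "str or enum" fields in
# this module. The address and event names are assumptions for demonstration.
def _example_notification_setting():
    setting = NotificationSetting(
        emails=['[email protected]'],
        email_on=['JobCompleted', 'JobFailed'],  # assumed enum values
    )
    return setting.serialize()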
class ODataError(msrest.serialization.Model):
"""Represents OData v4 error object.
:ivar code: Gets or sets a language-independent, service-defined error code.
This code serves as a sub-status for the HTTP error code specified
in the response.
:vartype code: str
:ivar message: Gets or sets a human-readable, language-dependent representation of the error.
The ``Content-Language`` header MUST contain the language code from [RFC5646]
corresponding to the language in which the value for message is written.
:vartype message: str
:ivar target: Gets or sets the target of the particular error
(for example, the name of the property in error).
:vartype target: str
:ivar details: Gets or sets additional details about the error.
:vartype details: list[~flow.models.ODataErrorDetail]
:ivar innererror: The contents of this object are service-defined.
Usually this object contains information that will help debug the service
and SHOULD only be used in development environments in order to guard
against potential security concerns around information disclosure.
:vartype innererror: ~flow.models.ODataInnerError
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'details': {'key': 'details', 'type': '[ODataErrorDetail]'},
'innererror': {'key': 'innererror', 'type': 'ODataInnerError'},
}
def __init__(
self,
**kwargs
):
"""
:keyword code: Gets or sets a language-independent, service-defined error code.
This code serves as a sub-status for the HTTP error code specified
in the response.
:paramtype code: str
:keyword message: Gets or sets a human-readable, language-dependent representation of the
error.
The ``Content-Language`` header MUST contain the language code from [RFC5646]
corresponding to the language in which the value for message is written.
:paramtype message: str
:keyword target: Gets or sets the target of the particular error
(for example, the name of the property in error).
:paramtype target: str
:keyword details: Gets or sets additional details about the error.
:paramtype details: list[~flow.models.ODataErrorDetail]
:keyword innererror: The contents of this object are service-defined.
Usually this object contains information that will help debug the service
and SHOULD only be used in development environments in order to guard
against potential security concerns around information disclosure.
:paramtype innererror: ~flow.models.ODataInnerError
"""
super(ODataError, self).__init__(**kwargs)
self.code = kwargs.get('code', None)
self.message = kwargs.get('message', None)
self.target = kwargs.get('target', None)
self.details = kwargs.get('details', None)
self.innererror = kwargs.get('innererror', None)
class ODataErrorDetail(msrest.serialization.Model):
"""Represents additional error details.
:ivar code: Gets or sets a language-independent, service-defined error code.
:vartype code: str
:ivar message: Gets or sets a human-readable, language-dependent representation of the error.
:vartype message: str
:ivar target: Gets or sets the target of the particular error
(for example, the name of the property in error).
:vartype target: str
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword code: Gets or sets a language-independent, service-defined error code.
:paramtype code: str
:keyword message: Gets or sets a human-readable, language-dependent representation of the
error.
:paramtype message: str
:keyword target: Gets or sets the target of the particular error
(for example, the name of the property in error).
:paramtype target: str
"""
super(ODataErrorDetail, self).__init__(**kwargs)
self.code = kwargs.get('code', None)
self.message = kwargs.get('message', None)
self.target = kwargs.get('target', None)
class ODataErrorResponse(msrest.serialization.Model):
"""Represents OData v4 compliant error response message.
:ivar error: Represents OData v4 error object.
:vartype error: ~flow.models.ODataError
"""
_attribute_map = {
'error': {'key': 'error', 'type': 'ODataError'},
}
def __init__(
self,
**kwargs
):
"""
:keyword error: Represents OData v4 error object.
:paramtype error: ~flow.models.ODataError
"""
super(ODataErrorResponse, self).__init__(**kwargs)
self.error = kwargs.get('error', None)
class ODataInnerError(msrest.serialization.Model):
"""The contents of this object are service-defined.
Usually this object contains information that will help debug the service
and SHOULD only be used in development environments in order to guard
against potential security concerns around information disclosure.
:ivar client_request_id: Gets or sets the client-provided request ID.
:vartype client_request_id: str
:ivar service_request_id: Gets or sets the server-generated request ID.
:vartype service_request_id: str
:ivar trace: Gets or sets the exception stack trace.
DO NOT INCLUDE IT IN A PRODUCTION ENVIRONMENT.
:vartype trace: str
:ivar context: Gets or sets additional context for the exception.
DO NOT INCLUDE IT IN A PRODUCTION ENVIRONMENT.
:vartype context: str
"""
_attribute_map = {
'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
'service_request_id': {'key': 'serviceRequestId', 'type': 'str'},
'trace': {'key': 'trace', 'type': 'str'},
'context': {'key': 'context', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword client_request_id: Gets or sets the client-provided request ID.
:paramtype client_request_id: str
:keyword service_request_id: Gets or sets the server-generated request ID.
:paramtype service_request_id: str
:keyword trace: Gets or sets the exception stack trace.
DO NOT INCLUDE IT IN A PRODUCTION ENVIRONMENT.
:paramtype trace: str
:keyword context: Gets or sets additional context for the exception.
DO NOT INCLUDE IT IN A PRODUCTION ENVIRONMENT.
:paramtype context: str
"""
super(ODataInnerError, self).__init__(**kwargs)
self.client_request_id = kwargs.get('client_request_id', None)
self.service_request_id = kwargs.get('service_request_id', None)
self.trace = kwargs.get('trace', None)
self.context = kwargs.get('context', None)
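# Illustrative sketch (editorial addition, not generated): deserializes a
# nested OData error payload into the model family defined above. The payload
# shape follows the attribute maps; the specific codes and messages are
# assumptions for demonstration.
def _example_odata_error_roundtrip():
    payload = {
        'error': {
            'code': 'ValidationError',
            'message': 'The request is invalid.',
            'details': [{'code': 'MissingField', 'message': 'name is required.'}],
        }
    }
    response = ODataErrorResponse.deserialize(payload)
    # Nested models are materialized recursively.
    return response.error.code, [d.code for d in response.error.details]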
class OutputData(msrest.serialization.Model):
"""OutputData.
:ivar output_location:
:vartype output_location: ~flow.models.ExecutionDataLocation
:ivar mechanism: Possible values include: "Upload", "Mount", "Hdfs", "Link", "Direct".
:vartype mechanism: str or ~flow.models.OutputMechanism
:ivar additional_options:
:vartype additional_options: ~flow.models.OutputOptions
:ivar environment_variable_name:
:vartype environment_variable_name: str
"""
_attribute_map = {
'output_location': {'key': 'outputLocation', 'type': 'ExecutionDataLocation'},
'mechanism': {'key': 'mechanism', 'type': 'str'},
'additional_options': {'key': 'additionalOptions', 'type': 'OutputOptions'},
'environment_variable_name': {'key': 'environmentVariableName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword output_location:
:paramtype output_location: ~flow.models.ExecutionDataLocation
:keyword mechanism: Possible values include: "Upload", "Mount", "Hdfs", "Link", "Direct".
:paramtype mechanism: str or ~flow.models.OutputMechanism
:keyword additional_options:
:paramtype additional_options: ~flow.models.OutputOptions
:keyword environment_variable_name:
:paramtype environment_variable_name: str
"""
super(OutputData, self).__init__(**kwargs)
self.output_location = kwargs.get('output_location', None)
self.mechanism = kwargs.get('mechanism', None)
self.additional_options = kwargs.get('additional_options', None)
self.environment_variable_name = kwargs.get('environment_variable_name', None)
class OutputDataBinding(msrest.serialization.Model):
"""OutputDataBinding.
:ivar datastore_id:
:vartype datastore_id: str
:ivar path_on_datastore:
:vartype path_on_datastore: str
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar description:
:vartype description: str
:ivar uri:
:vartype uri: ~flow.models.MfeInternalUriReference
:ivar mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
"ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
:vartype mode: str or ~flow.models.DataBindingMode
:ivar asset_uri:
:vartype asset_uri: str
:ivar is_asset_job_output:
:vartype is_asset_job_output: bool
:ivar job_output_type: Possible values include: "Uri", "Dataset", "UriFile", "UriFolder",
"MLTable", "CustomModel", "MLFlowModel", "TritonModel".
:vartype job_output_type: str or ~flow.models.JobOutputType
:ivar asset_name:
:vartype asset_name: str
:ivar asset_version:
:vartype asset_version: str
:ivar auto_delete_setting:
:vartype auto_delete_setting: ~flow.models.AutoDeleteSetting
"""
_attribute_map = {
'datastore_id': {'key': 'datastoreId', 'type': 'str'},
'path_on_datastore': {'key': 'pathOnDatastore', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'MfeInternalUriReference'},
'mode': {'key': 'mode', 'type': 'str'},
'asset_uri': {'key': 'assetUri', 'type': 'str'},
'is_asset_job_output': {'key': 'isAssetJobOutput', 'type': 'bool'},
'job_output_type': {'key': 'jobOutputType', 'type': 'str'},
'asset_name': {'key': 'assetName', 'type': 'str'},
'asset_version': {'key': 'assetVersion', 'type': 'str'},
'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'},
}
def __init__(
self,
**kwargs
):
"""
:keyword datastore_id:
:paramtype datastore_id: str
:keyword path_on_datastore:
:paramtype path_on_datastore: str
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword description:
:paramtype description: str
:keyword uri:
:paramtype uri: ~flow.models.MfeInternalUriReference
:keyword mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
"ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
:paramtype mode: str or ~flow.models.DataBindingMode
:keyword asset_uri:
:paramtype asset_uri: str
:keyword is_asset_job_output:
:paramtype is_asset_job_output: bool
:keyword job_output_type: Possible values include: "Uri", "Dataset", "UriFile", "UriFolder",
"MLTable", "CustomModel", "MLFlowModel", "TritonModel".
:paramtype job_output_type: str or ~flow.models.JobOutputType
:keyword asset_name:
:paramtype asset_name: str
:keyword asset_version:
:paramtype asset_version: str
:keyword auto_delete_setting:
:paramtype auto_delete_setting: ~flow.models.AutoDeleteSetting
"""
super(OutputDataBinding, self).__init__(**kwargs)
self.datastore_id = kwargs.get('datastore_id', None)
self.path_on_datastore = kwargs.get('path_on_datastore', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
self.description = kwargs.get('description', None)
self.uri = kwargs.get('uri', None)
self.mode = kwargs.get('mode', None)
self.asset_uri = kwargs.get('asset_uri', None)
self.is_asset_job_output = kwargs.get('is_asset_job_output', None)
self.job_output_type = kwargs.get('job_output_type', None)
self.asset_name = kwargs.get('asset_name', None)
self.asset_version = kwargs.get('asset_version', None)
self.auto_delete_setting = kwargs.get('auto_delete_setting', None)
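# Illustrative sketch (editorial addition, not generated): a minimal output
# binding using the mode and output-type strings documented above. The
# datastore name and path are assumptions for demonstration only.
def _example_output_data_binding():
    binding = OutputDataBinding(
        datastore_id='workspaceblobstore',
        path_on_datastore='outputs/model',
        mode='Upload',
        job_output_type='UriFolder',
    )
    return binding.serialize()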
class OutputDatasetLineage(msrest.serialization.Model):
"""OutputDatasetLineage.
:ivar identifier:
:vartype identifier: ~flow.models.DatasetIdentifier
:ivar output_type: Possible values include: "RunOutput", "Reference".
:vartype output_type: str or ~flow.models.DatasetOutputType
:ivar output_details:
:vartype output_details: ~flow.models.DatasetOutputDetails
"""
_attribute_map = {
'identifier': {'key': 'identifier', 'type': 'DatasetIdentifier'},
'output_type': {'key': 'outputType', 'type': 'str'},
'output_details': {'key': 'outputDetails', 'type': 'DatasetOutputDetails'},
}
def __init__(
self,
**kwargs
):
"""
:keyword identifier:
:paramtype identifier: ~flow.models.DatasetIdentifier
:keyword output_type: Possible values include: "RunOutput", "Reference".
:paramtype output_type: str or ~flow.models.DatasetOutputType
:keyword output_details:
:paramtype output_details: ~flow.models.DatasetOutputDetails
"""
super(OutputDatasetLineage, self).__init__(**kwargs)
self.identifier = kwargs.get('identifier', None)
self.output_type = kwargs.get('output_type', None)
self.output_details = kwargs.get('output_details', None)
class OutputDefinition(msrest.serialization.Model):
"""OutputDefinition.
:ivar name:
:vartype name: str
:ivar type:
:vartype type: list[str or ~flow.models.ValueType]
:ivar description:
:vartype description: str
:ivar is_property:
:vartype is_property: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': '[str]'},
'description': {'key': 'description', 'type': 'str'},
'is_property': {'key': 'isProperty', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type:
:paramtype type: list[str or ~flow.models.ValueType]
:keyword description:
:paramtype description: str
:keyword is_property:
:paramtype is_property: bool
"""
super(OutputDefinition, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = kwargs.get('type', None)
self.description = kwargs.get('description', None)
self.is_property = kwargs.get('is_property', None)
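
# Illustrative round-trip sketch (hand-written): msrest can rebuild an
# OutputDefinition from a wire-format dict whose keys follow the attribute map
# above. 'AnyFile' is a placeholder; real values come from ~flow.models.ValueType.
def _example_output_definition_from_payload():
    payload = {'name': 'scored_data', 'type': ['AnyFile'], 'isProperty': False}
    definition = OutputDefinition.deserialize(payload)
    return definition.name, definition.is_property  # ('scored_data', False)
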
class OutputOptions(msrest.serialization.Model):
"""OutputOptions.
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar registration_options:
:vartype registration_options: ~flow.models.RegistrationOptions
:ivar upload_options:
:vartype upload_options: ~flow.models.UploadOptions
:ivar mount_options: Dictionary of :code:`<string>`.
:vartype mount_options: dict[str, str]
"""
_attribute_map = {
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'registration_options': {'key': 'registrationOptions', 'type': 'RegistrationOptions'},
'upload_options': {'key': 'uploadOptions', 'type': 'UploadOptions'},
'mount_options': {'key': 'mountOptions', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword registration_options:
:paramtype registration_options: ~flow.models.RegistrationOptions
:keyword upload_options:
:paramtype upload_options: ~flow.models.UploadOptions
:keyword mount_options: Dictionary of :code:`<string>`.
:paramtype mount_options: dict[str, str]
"""
super(OutputOptions, self).__init__(**kwargs)
self.path_on_compute = kwargs.get('path_on_compute', None)
self.registration_options = kwargs.get('registration_options', None)
self.upload_options = kwargs.get('upload_options', None)
self.mount_options = kwargs.get('mount_options', None)
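
# Illustrative sketch (hand-written): a minimal OutputOptions. The mount-option
# key below is a placeholder; the model only requires str -> str entries.
def _example_output_options():
    return OutputOptions(
        path_on_compute='outputs/model',          # serialized as 'pathOnCompute'
        mount_options={'exampleOption': 'true'},  # serialized as 'mountOptions'
    )
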
class OutputSetting(msrest.serialization.Model):
"""OutputSetting.
:ivar name:
:vartype name: str
:ivar data_store_name:
:vartype data_store_name: str
:ivar data_store_name_parameter_assignment:
:vartype data_store_name_parameter_assignment: ~flow.models.ParameterAssignment
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
:ivar data_store_mode_parameter_assignment:
:vartype data_store_mode_parameter_assignment: ~flow.models.ParameterAssignment
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar path_on_compute_parameter_assignment:
:vartype path_on_compute_parameter_assignment: ~flow.models.ParameterAssignment
:ivar overwrite:
:vartype overwrite: bool
:ivar data_reference_name:
:vartype data_reference_name: str
:ivar web_service_port:
:vartype web_service_port: str
:ivar dataset_registration:
:vartype dataset_registration: ~flow.models.DatasetRegistration
:ivar dataset_output_options:
:vartype dataset_output_options: ~flow.models.DatasetOutputOptions
:ivar asset_output_settings:
:vartype asset_output_settings: ~flow.models.AssetOutputSettings
:ivar parameter_name:
:vartype parameter_name: str
:ivar asset_output_settings_parameter_name:
:vartype asset_output_settings_parameter_name: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
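        # NOTE (hand-added): the Pascal-cased JSON keys below, such as
        # 'DataStoreNameParameterAssignment', are emitted verbatim from the
        # service contract; the mixed casing is intentional, not a typo.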
'data_store_name_parameter_assignment': {'key': 'DataStoreNameParameterAssignment', 'type': 'ParameterAssignment'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'data_store_mode_parameter_assignment': {'key': 'DataStoreModeParameterAssignment', 'type': 'ParameterAssignment'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'path_on_compute_parameter_assignment': {'key': 'PathOnComputeParameterAssignment', 'type': 'ParameterAssignment'},
'overwrite': {'key': 'overwrite', 'type': 'bool'},
'data_reference_name': {'key': 'dataReferenceName', 'type': 'str'},
'web_service_port': {'key': 'webServicePort', 'type': 'str'},
'dataset_registration': {'key': 'datasetRegistration', 'type': 'DatasetRegistration'},
'dataset_output_options': {'key': 'datasetOutputOptions', 'type': 'DatasetOutputOptions'},
'asset_output_settings': {'key': 'AssetOutputSettings', 'type': 'AssetOutputSettings'},
'parameter_name': {'key': 'parameterName', 'type': 'str'},
'asset_output_settings_parameter_name': {'key': 'AssetOutputSettingsParameterName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword data_store_name:
:paramtype data_store_name: str
:keyword data_store_name_parameter_assignment:
:paramtype data_store_name_parameter_assignment: ~flow.models.ParameterAssignment
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
:keyword data_store_mode_parameter_assignment:
:paramtype data_store_mode_parameter_assignment: ~flow.models.ParameterAssignment
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword path_on_compute_parameter_assignment:
:paramtype path_on_compute_parameter_assignment: ~flow.models.ParameterAssignment
:keyword overwrite:
:paramtype overwrite: bool
:keyword data_reference_name:
:paramtype data_reference_name: str
:keyword web_service_port:
:paramtype web_service_port: str
:keyword dataset_registration:
:paramtype dataset_registration: ~flow.models.DatasetRegistration
:keyword dataset_output_options:
:paramtype dataset_output_options: ~flow.models.DatasetOutputOptions
:keyword asset_output_settings:
:paramtype asset_output_settings: ~flow.models.AssetOutputSettings
:keyword parameter_name:
:paramtype parameter_name: str
:keyword asset_output_settings_parameter_name:
:paramtype asset_output_settings_parameter_name: str
"""
super(OutputSetting, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.data_store_name = kwargs.get('data_store_name', None)
self.data_store_name_parameter_assignment = kwargs.get('data_store_name_parameter_assignment', None)
self.data_store_mode = kwargs.get('data_store_mode', None)
self.data_store_mode_parameter_assignment = kwargs.get('data_store_mode_parameter_assignment', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
self.path_on_compute_parameter_assignment = kwargs.get('path_on_compute_parameter_assignment', None)
self.overwrite = kwargs.get('overwrite', None)
self.data_reference_name = kwargs.get('data_reference_name', None)
self.web_service_port = kwargs.get('web_service_port', None)
self.dataset_registration = kwargs.get('dataset_registration', None)
self.dataset_output_options = kwargs.get('dataset_output_options', None)
self.asset_output_settings = kwargs.get('asset_output_settings', None)
self.parameter_name = kwargs.get('parameter_name', None)
self.asset_output_settings_parameter_name = kwargs.get('asset_output_settings_parameter_name', None)
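
# Illustrative sketch (hand-written): an OutputSetting that pins the output to
# a named datastore. The names are placeholders; the mode value is drawn from
# the AEVADataStoreMode values documented above.
def _example_output_setting():
    return OutputSetting(
        name='output1',
        data_store_name='workspaceblobstore',  # placeholder datastore name
        data_store_mode='Mount',
        overwrite=True,
    )
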
class OutputSettingSpec(msrest.serialization.Model):
"""OutputSettingSpec.
:ivar supported_data_store_modes:
:vartype supported_data_store_modes: list[str or ~flow.models.AEVADataStoreMode]
:ivar default_asset_output_path:
:vartype default_asset_output_path: str
"""
_attribute_map = {
'supported_data_store_modes': {'key': 'supportedDataStoreModes', 'type': '[str]'},
'default_asset_output_path': {'key': 'defaultAssetOutputPath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword supported_data_store_modes:
:paramtype supported_data_store_modes: list[str or ~flow.models.AEVADataStoreMode]
:keyword default_asset_output_path:
:paramtype default_asset_output_path: str
"""
super(OutputSettingSpec, self).__init__(**kwargs)
self.supported_data_store_modes = kwargs.get('supported_data_store_modes', None)
self.default_asset_output_path = kwargs.get('default_asset_output_path', None)
class PaginatedDataInfoList(msrest.serialization.Model):
"""A paginated list of DataInfos.
:ivar value: An array of objects of type DataInfo.
:vartype value: list[~flow.models.DataInfo]
:ivar continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:vartype continuation_token: str
:ivar next_link: The link to the next page constructed using the continuationToken. If null,
there are no additional pages.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[DataInfo]'},
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword value: An array of objects of type DataInfo.
:paramtype value: list[~flow.models.DataInfo]
:keyword continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:paramtype continuation_token: str
:keyword next_link: The link to the next page constructed using the continuationToken. If
null, there are no additional pages.
:paramtype next_link: str
"""
super(PaginatedDataInfoList, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.continuation_token = kwargs.get('continuation_token', None)
self.next_link = kwargs.get('next_link', None)
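
# Illustrative sketch (hand-written): the continuation-token paging contract
# shared by every Paginated*List model in this module. 'fetch_page' is a
# stand-in for whatever call returns one deserialized page; it is not part of
# this module.
def _example_drain_paginated_list(fetch_page):
    items = []
    token = None
    while True:
        page = fetch_page(continuation_token=token)  # e.g. a PaginatedDataInfoList
        items.extend(page.value or [])
        token = page.continuation_token
        if not token:  # a null token means there are no additional pages
            break
    return items
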
class PaginatedModelDtoList(msrest.serialization.Model):
"""A paginated list of ModelDtos.
:ivar value: An array of objects of type ModelDto.
:vartype value: list[~flow.models.ModelDto]
:ivar continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:vartype continuation_token: str
:ivar next_link: The link to the next page constructed using the continuationToken. If null,
there are no additional pages.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ModelDto]'},
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword value: An array of objects of type ModelDto.
:paramtype value: list[~flow.models.ModelDto]
:keyword continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:paramtype continuation_token: str
:keyword next_link: The link to the next page constructed using the continuationToken. If
null, there are no additional pages.
:paramtype next_link: str
"""
super(PaginatedModelDtoList, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.continuation_token = kwargs.get('continuation_token', None)
self.next_link = kwargs.get('next_link', None)
class PaginatedModuleDtoList(msrest.serialization.Model):
"""A paginated list of ModuleDtos.
:ivar value: An array of objects of type ModuleDto.
:vartype value: list[~flow.models.ModuleDto]
:ivar continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:vartype continuation_token: str
:ivar next_link: The link to the next page constructed using the continuationToken. If null,
there are no additional pages.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ModuleDto]'},
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword value: An array of objects of type ModuleDto.
:paramtype value: list[~flow.models.ModuleDto]
:keyword continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:paramtype continuation_token: str
:keyword next_link: The link to the next page constructed using the continuationToken. If
null, there are no additional pages.
:paramtype next_link: str
"""
super(PaginatedModuleDtoList, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.continuation_token = kwargs.get('continuation_token', None)
self.next_link = kwargs.get('next_link', None)
class PaginatedPipelineDraftSummaryList(msrest.serialization.Model):
"""A paginated list of PipelineDraftSummarys.
:ivar value: An array of objects of type PipelineDraftSummary.
:vartype value: list[~flow.models.PipelineDraftSummary]
:ivar continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:vartype continuation_token: str
:ivar next_link: The link to the next page constructed using the continuationToken. If null,
there are no additional pages.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[PipelineDraftSummary]'},
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword value: An array of objects of type PipelineDraftSummary.
:paramtype value: list[~flow.models.PipelineDraftSummary]
:keyword continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:paramtype continuation_token: str
:keyword next_link: The link to the next page constructed using the continuationToken. If
null, there are no additional pages.
:paramtype next_link: str
"""
super(PaginatedPipelineDraftSummaryList, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.continuation_token = kwargs.get('continuation_token', None)
self.next_link = kwargs.get('next_link', None)
class PaginatedPipelineEndpointSummaryList(msrest.serialization.Model):
"""A paginated list of PipelineEndpointSummarys.
:ivar value: An array of objects of type PipelineEndpointSummary.
:vartype value: list[~flow.models.PipelineEndpointSummary]
:ivar continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:vartype continuation_token: str
:ivar next_link: The link to the next page constructed using the continuationToken. If null,
there are no additional pages.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[PipelineEndpointSummary]'},
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword value: An array of objects of type PipelineEndpointSummary.
:paramtype value: list[~flow.models.PipelineEndpointSummary]
:keyword continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:paramtype continuation_token: str
:keyword next_link: The link to the next page constructed using the continuationToken. If
null, there are no additional pages.
:paramtype next_link: str
"""
super(PaginatedPipelineEndpointSummaryList, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.continuation_token = kwargs.get('continuation_token', None)
self.next_link = kwargs.get('next_link', None)
class PaginatedPipelineRunSummaryList(msrest.serialization.Model):
"""A paginated list of PipelineRunSummarys.
:ivar value: An array of objects of type PipelineRunSummary.
:vartype value: list[~flow.models.PipelineRunSummary]
:ivar continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:vartype continuation_token: str
:ivar next_link: The link to the next page constructed using the continuationToken. If null,
there are no additional pages.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[PipelineRunSummary]'},
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword value: An array of objects of type PipelineRunSummary.
:paramtype value: list[~flow.models.PipelineRunSummary]
:keyword continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:paramtype continuation_token: str
:keyword next_link: The link to the next page constructed using the continuationToken. If
null, there are no additional pages.
:paramtype next_link: str
"""
super(PaginatedPipelineRunSummaryList, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.continuation_token = kwargs.get('continuation_token', None)
self.next_link = kwargs.get('next_link', None)
class PaginatedPublishedPipelineSummaryList(msrest.serialization.Model):
"""A paginated list of PublishedPipelineSummarys.
:ivar value: An array of objects of type PublishedPipelineSummary.
:vartype value: list[~flow.models.PublishedPipelineSummary]
:ivar continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:vartype continuation_token: str
:ivar next_link: The link to the next page constructed using the continuationToken. If null,
there are no additional pages.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[PublishedPipelineSummary]'},
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword value: An array of objects of type PublishedPipelineSummary.
:paramtype value: list[~flow.models.PublishedPipelineSummary]
:keyword continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:paramtype continuation_token: str
:keyword next_link: The link to the next page constructed using the continuationToken. If
null, there are no additional pages.
:paramtype next_link: str
"""
super(PaginatedPublishedPipelineSummaryList, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.continuation_token = kwargs.get('continuation_token', None)
self.next_link = kwargs.get('next_link', None)
class ParallelForControlFlowInfo(msrest.serialization.Model):
"""ParallelForControlFlowInfo.
:ivar parallel_for_items_input:
:vartype parallel_for_items_input: ~flow.models.ParameterAssignment
"""
_attribute_map = {
'parallel_for_items_input': {'key': 'parallelForItemsInput', 'type': 'ParameterAssignment'},
}
def __init__(
self,
**kwargs
):
"""
:keyword parallel_for_items_input:
:paramtype parallel_for_items_input: ~flow.models.ParameterAssignment
"""
super(ParallelForControlFlowInfo, self).__init__(**kwargs)
self.parallel_for_items_input = kwargs.get('parallel_for_items_input', None)
class ParallelTaskConfiguration(msrest.serialization.Model):
"""ParallelTaskConfiguration.
:ivar max_retries_per_worker:
:vartype max_retries_per_worker: int
:ivar worker_count_per_node:
:vartype worker_count_per_node: int
:ivar terminal_exit_codes:
:vartype terminal_exit_codes: list[int]
:ivar configuration: Dictionary of :code:`<string>`.
:vartype configuration: dict[str, str]
"""
_attribute_map = {
'max_retries_per_worker': {'key': 'maxRetriesPerWorker', 'type': 'int'},
'worker_count_per_node': {'key': 'workerCountPerNode', 'type': 'int'},
'terminal_exit_codes': {'key': 'terminalExitCodes', 'type': '[int]'},
'configuration': {'key': 'configuration', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword max_retries_per_worker:
:paramtype max_retries_per_worker: int
:keyword worker_count_per_node:
:paramtype worker_count_per_node: int
:keyword terminal_exit_codes:
:paramtype terminal_exit_codes: list[int]
:keyword configuration: Dictionary of :code:`<string>`.
:paramtype configuration: dict[str, str]
"""
super(ParallelTaskConfiguration, self).__init__(**kwargs)
self.max_retries_per_worker = kwargs.get('max_retries_per_worker', None)
self.worker_count_per_node = kwargs.get('worker_count_per_node', None)
self.terminal_exit_codes = kwargs.get('terminal_exit_codes', None)
self.configuration = kwargs.get('configuration', None)
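
# Illustrative sketch (hand-written): a parallel task tuned for two workers per
# node that treats exit code 42 as terminal. The numbers and the configuration
# key are placeholders.
def _example_parallel_task_configuration():
    return ParallelTaskConfiguration(
        max_retries_per_worker=3,
        worker_count_per_node=2,
        terminal_exit_codes=[42],
        configuration={'exampleKey': 'exampleValue'},  # free-form str -> str
    )
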
class Parameter(msrest.serialization.Model):
"""Parameter.
:ivar name:
:vartype name: str
:ivar documentation:
:vartype documentation: str
:ivar default_value:
:vartype default_value: str
:ivar is_optional:
:vartype is_optional: bool
:ivar min_max_rules:
:vartype min_max_rules: list[~flow.models.MinMaxParameterRule]
:ivar enum_rules:
:vartype enum_rules: list[~flow.models.EnumParameterRule]
:ivar type: Possible values include: "Int", "Double", "Bool", "String", "Undefined".
:vartype type: str or ~flow.models.ParameterType
:ivar label:
:vartype label: str
:ivar group_names:
:vartype group_names: list[str]
:ivar argument_name:
:vartype argument_name: str
:ivar ui_hint:
:vartype ui_hint: ~flow.models.UIParameterHint
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'documentation': {'key': 'documentation', 'type': 'str'},
'default_value': {'key': 'defaultValue', 'type': 'str'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
'min_max_rules': {'key': 'minMaxRules', 'type': '[MinMaxParameterRule]'},
'enum_rules': {'key': 'enumRules', 'type': '[EnumParameterRule]'},
'type': {'key': 'type', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'group_names': {'key': 'groupNames', 'type': '[str]'},
'argument_name': {'key': 'argumentName', 'type': 'str'},
'ui_hint': {'key': 'uiHint', 'type': 'UIParameterHint'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword documentation:
:paramtype documentation: str
:keyword default_value:
:paramtype default_value: str
:keyword is_optional:
:paramtype is_optional: bool
:keyword min_max_rules:
:paramtype min_max_rules: list[~flow.models.MinMaxParameterRule]
:keyword enum_rules:
:paramtype enum_rules: list[~flow.models.EnumParameterRule]
:keyword type: Possible values include: "Int", "Double", "Bool", "String", "Undefined".
:paramtype type: str or ~flow.models.ParameterType
:keyword label:
:paramtype label: str
:keyword group_names:
:paramtype group_names: list[str]
:keyword argument_name:
:paramtype argument_name: str
:keyword ui_hint:
:paramtype ui_hint: ~flow.models.UIParameterHint
"""
super(Parameter, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.documentation = kwargs.get('documentation', None)
self.default_value = kwargs.get('default_value', None)
self.is_optional = kwargs.get('is_optional', None)
self.min_max_rules = kwargs.get('min_max_rules', None)
self.enum_rules = kwargs.get('enum_rules', None)
self.type = kwargs.get('type', None)
self.label = kwargs.get('label', None)
self.group_names = kwargs.get('group_names', None)
self.argument_name = kwargs.get('argument_name', None)
self.ui_hint = kwargs.get('ui_hint', None)
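
# Illustrative sketch (hand-written): a simple optional numeric parameter.
# Note that default values are carried as strings regardless of 'type';
# min_max_rules and enum_rules are omitted here for brevity.
def _example_parameter():
    return Parameter(
        name='learning_rate',
        type='Double',         # one of the ParameterType values
        default_value='0.01',  # stringly-typed by contract
        is_optional=True,
    )
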
class ParameterAssignment(msrest.serialization.Model):
"""ParameterAssignment.
:ivar value_type: Possible values include: "Literal", "GraphParameterName", "Concatenate",
"Input", "DataPath", "DataSetDefinition".
:vartype value_type: str or ~flow.models.ParameterValueType
:ivar assignments_to_concatenate:
:vartype assignments_to_concatenate: list[~flow.models.ParameterAssignment]
:ivar data_path_assignment:
:vartype data_path_assignment: ~flow.models.LegacyDataPath
:ivar data_set_definition_value_assignment:
:vartype data_set_definition_value_assignment: ~flow.models.DataSetDefinitionValue
:ivar name:
:vartype name: str
:ivar value:
:vartype value: str
"""
_attribute_map = {
'value_type': {'key': 'valueType', 'type': 'str'},
'assignments_to_concatenate': {'key': 'assignmentsToConcatenate', 'type': '[ParameterAssignment]'},
'data_path_assignment': {'key': 'dataPathAssignment', 'type': 'LegacyDataPath'},
'data_set_definition_value_assignment': {'key': 'dataSetDefinitionValueAssignment', 'type': 'DataSetDefinitionValue'},
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword value_type: Possible values include: "Literal", "GraphParameterName", "Concatenate",
"Input", "DataPath", "DataSetDefinition".
:paramtype value_type: str or ~flow.models.ParameterValueType
:keyword assignments_to_concatenate:
:paramtype assignments_to_concatenate: list[~flow.models.ParameterAssignment]
:keyword data_path_assignment:
:paramtype data_path_assignment: ~flow.models.LegacyDataPath
:keyword data_set_definition_value_assignment:
:paramtype data_set_definition_value_assignment: ~flow.models.DataSetDefinitionValue
:keyword name:
:paramtype name: str
:keyword value:
:paramtype value: str
"""
super(ParameterAssignment, self).__init__(**kwargs)
self.value_type = kwargs.get('value_type', None)
self.assignments_to_concatenate = kwargs.get('assignments_to_concatenate', None)
self.data_path_assignment = kwargs.get('data_path_assignment', None)
self.data_set_definition_value_assignment = kwargs.get('data_set_definition_value_assignment', None)
self.name = kwargs.get('name', None)
self.value = kwargs.get('value', None)
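
# Illustrative sketch (hand-written): a 'Concatenate' assignment built from a
# literal prefix and a graph-parameter reference. That 'GraphParameterName'
# carries the referenced parameter name in 'value' is an assumption made for
# illustration, not a documented guarantee.
def _example_concatenated_parameter_assignment():
    prefix = ParameterAssignment(value_type='Literal', value='run-')
    suffix = ParameterAssignment(value_type='GraphParameterName', value='run_suffix')
    return ParameterAssignment(
        value_type='Concatenate',
        assignments_to_concatenate=[prefix, suffix],
    )
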
class ParameterDefinition(msrest.serialization.Model):
"""ParameterDefinition.
:ivar name:
:vartype name: str
:ivar type:
:vartype type: str
:ivar value:
:vartype value: str
:ivar is_optional:
:vartype is_optional: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type:
:paramtype type: str
:keyword value:
:paramtype value: str
:keyword is_optional:
:paramtype is_optional: bool
"""
super(ParameterDefinition, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = kwargs.get('type', None)
self.value = kwargs.get('value', None)
self.is_optional = kwargs.get('is_optional', None)
class PatchFlowRequest(msrest.serialization.Model):
"""PatchFlowRequest.
:ivar flow_patch_operation_type: Possible values include: "ArchiveFlow", "RestoreFlow",
"ExportFlowToFile".
:vartype flow_patch_operation_type: str or ~flow.models.FlowPatchOperationType
:ivar flow_definition_file_path:
:vartype flow_definition_file_path: str
"""
_attribute_map = {
'flow_patch_operation_type': {'key': 'flowPatchOperationType', 'type': 'str'},
'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_patch_operation_type: Possible values include: "ArchiveFlow", "RestoreFlow",
"ExportFlowToFile".
:paramtype flow_patch_operation_type: str or ~flow.models.FlowPatchOperationType
:keyword flow_definition_file_path:
:paramtype flow_definition_file_path: str
"""
super(PatchFlowRequest, self).__init__(**kwargs)
self.flow_patch_operation_type = kwargs.get('flow_patch_operation_type', None)
self.flow_definition_file_path = kwargs.get('flow_definition_file_path', None)
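
# Illustrative sketch (hand-written): an archive request. The
# 'flow_definition_file_path' field is presumably only relevant to the
# 'ExportFlowToFile' operation, so it is left unset here.
def _example_patch_flow_request():
    return PatchFlowRequest(flow_patch_operation_type='ArchiveFlow')
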
class Pipeline(msrest.serialization.Model):
"""Pipeline.
:ivar run_id:
:vartype run_id: str
:ivar continue_run_on_step_failure:
:vartype continue_run_on_step_failure: bool
:ivar default_datastore_name:
:vartype default_datastore_name: str
:ivar component_jobs: This is a dictionary.
:vartype component_jobs: dict[str, ~flow.models.ComponentJob]
:ivar inputs: This is a dictionary.
:vartype inputs: dict[str, ~flow.models.PipelineInput]
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, ~flow.models.PipelineOutput]
"""
_attribute_map = {
'run_id': {'key': 'runId', 'type': 'str'},
'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
'default_datastore_name': {'key': 'defaultDatastoreName', 'type': 'str'},
'component_jobs': {'key': 'componentJobs', 'type': '{ComponentJob}'},
'inputs': {'key': 'inputs', 'type': '{PipelineInput}'},
'outputs': {'key': 'outputs', 'type': '{PipelineOutput}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword run_id:
:paramtype run_id: str
:keyword continue_run_on_step_failure:
:paramtype continue_run_on_step_failure: bool
:keyword default_datastore_name:
:paramtype default_datastore_name: str
:keyword component_jobs: This is a dictionary.
:paramtype component_jobs: dict[str, ~flow.models.ComponentJob]
:keyword inputs: This is a dictionary.
:paramtype inputs: dict[str, ~flow.models.PipelineInput]
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, ~flow.models.PipelineOutput]
"""
super(Pipeline, self).__init__(**kwargs)
self.run_id = kwargs.get('run_id', None)
self.continue_run_on_step_failure = kwargs.get('continue_run_on_step_failure', None)
self.default_datastore_name = kwargs.get('default_datastore_name', None)
self.component_jobs = kwargs.get('component_jobs', None)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
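
# Illustrative sketch (hand-written): a Pipeline shell with empty maps. Real
# ComponentJob / PipelineInput / PipelineOutput instances would be supplied per
# the docstring above; the run id is a placeholder.
def _example_pipeline():
    return Pipeline(
        run_id='example-run-id',
        continue_run_on_step_failure=False,
        component_jobs={},
        inputs={},
        outputs={},
    )
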
class PipelineDraft(msrest.serialization.Model):
"""PipelineDraft.
:ivar graph_draft_id:
:vartype graph_draft_id: str
:ivar source_pipeline_run_id:
:vartype source_pipeline_run_id: str
:ivar latest_pipeline_run_id:
:vartype latest_pipeline_run_id: str
:ivar latest_run_experiment_name:
:vartype latest_run_experiment_name: str
:ivar latest_run_experiment_id:
:vartype latest_run_experiment_id: str
:ivar is_latest_run_experiment_archived:
:vartype is_latest_run_experiment_archived: bool
:ivar status:
:vartype status: ~flow.models.PipelineStatus
:ivar graph_detail:
:vartype graph_detail: ~flow.models.PipelineRunGraphDetail
:ivar real_time_endpoint_info:
:vartype real_time_endpoint_info: ~flow.models.RealTimeEndpointInfo
:ivar linked_pipelines_info:
:vartype linked_pipelines_info: list[~flow.models.LinkedPipelineInfo]
:ivar nodes_in_draft:
:vartype nodes_in_draft: list[str]
:ivar studio_migration_info:
:vartype studio_migration_info: ~flow.models.StudioMigrationInfo
:ivar flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
:vartype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
:ivar pipeline_run_setting_parameters:
:vartype pipeline_run_setting_parameters: list[~flow.models.RunSettingParameter]
:ivar pipeline_run_settings:
:vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:ivar continue_run_on_step_failure:
:vartype continue_run_on_step_failure: bool
:ivar continue_run_on_failed_optional_input:
:vartype continue_run_on_failed_optional_input: bool
:ivar default_compute:
:vartype default_compute: ~flow.models.ComputeSetting
:ivar default_datastore:
:vartype default_datastore: ~flow.models.DatastoreSetting
:ivar default_cloud_priority:
:vartype default_cloud_priority: ~flow.models.CloudPrioritySetting
:ivar enforce_rerun:
:vartype enforce_rerun: bool
:ivar pipeline_parameters: This is a dictionary.
:vartype pipeline_parameters: dict[str, str]
:ivar data_path_assignments: This is a dictionary.
:vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:ivar data_set_definition_value_assignments: This is a dictionary.
:vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue]
:ivar asset_output_settings_assignments: This is a dictionary.
:vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:ivar pipeline_timeout:
:vartype pipeline_timeout: int
:ivar identity_config:
:vartype identity_config: ~flow.models.IdentitySetting
:ivar graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
"ContainsDesignerBuildin".
:vartype graph_components_mode: str or ~flow.models.GraphComponentsMode
:ivar name:
:vartype name: str
:ivar last_edited_by:
:vartype last_edited_by: str
:ivar created_by:
:vartype created_by: str
:ivar description:
:vartype description: str
:ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline",
"BatchInferencePipeline", "Unknown".
:vartype pipeline_type: str or ~flow.models.PipelineType
:ivar pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:vartype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'graph_draft_id': {'key': 'graphDraftId', 'type': 'str'},
'source_pipeline_run_id': {'key': 'sourcePipelineRunId', 'type': 'str'},
'latest_pipeline_run_id': {'key': 'latestPipelineRunId', 'type': 'str'},
'latest_run_experiment_name': {'key': 'latestRunExperimentName', 'type': 'str'},
'latest_run_experiment_id': {'key': 'latestRunExperimentId', 'type': 'str'},
'is_latest_run_experiment_archived': {'key': 'isLatestRunExperimentArchived', 'type': 'bool'},
'status': {'key': 'status', 'type': 'PipelineStatus'},
'graph_detail': {'key': 'graphDetail', 'type': 'PipelineRunGraphDetail'},
'real_time_endpoint_info': {'key': 'realTimeEndpointInfo', 'type': 'RealTimeEndpointInfo'},
'linked_pipelines_info': {'key': 'linkedPipelinesInfo', 'type': '[LinkedPipelineInfo]'},
'nodes_in_draft': {'key': 'nodesInDraft', 'type': '[str]'},
'studio_migration_info': {'key': 'studioMigrationInfo', 'type': 'StudioMigrationInfo'},
'flattened_sub_graphs': {'key': 'flattenedSubGraphs', 'type': '{PipelineSubDraft}'},
'pipeline_run_setting_parameters': {'key': 'pipelineRunSettingParameters', 'type': '[RunSettingParameter]'},
'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
'continue_run_on_failed_optional_input': {'key': 'continueRunOnFailedOptionalInput', 'type': 'bool'},
'default_compute': {'key': 'defaultCompute', 'type': 'ComputeSetting'},
'default_datastore': {'key': 'defaultDatastore', 'type': 'DatastoreSetting'},
'default_cloud_priority': {'key': 'defaultCloudPriority', 'type': 'CloudPrioritySetting'},
'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'},
'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'},
'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
'pipeline_timeout': {'key': 'pipelineTimeout', 'type': 'int'},
'identity_config': {'key': 'identityConfig', 'type': 'IdentitySetting'},
'graph_components_mode': {'key': 'graphComponentsMode', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'last_edited_by': {'key': 'lastEditedBy', 'type': 'str'},
'created_by': {'key': 'createdBy', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
'pipeline_draft_mode': {'key': 'pipelineDraftMode', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword graph_draft_id:
:paramtype graph_draft_id: str
:keyword source_pipeline_run_id:
:paramtype source_pipeline_run_id: str
:keyword latest_pipeline_run_id:
:paramtype latest_pipeline_run_id: str
:keyword latest_run_experiment_name:
:paramtype latest_run_experiment_name: str
:keyword latest_run_experiment_id:
:paramtype latest_run_experiment_id: str
:keyword is_latest_run_experiment_archived:
:paramtype is_latest_run_experiment_archived: bool
:keyword status:
:paramtype status: ~flow.models.PipelineStatus
:keyword graph_detail:
:paramtype graph_detail: ~flow.models.PipelineRunGraphDetail
:keyword real_time_endpoint_info:
:paramtype real_time_endpoint_info: ~flow.models.RealTimeEndpointInfo
:keyword linked_pipelines_info:
:paramtype linked_pipelines_info: list[~flow.models.LinkedPipelineInfo]
:keyword nodes_in_draft:
:paramtype nodes_in_draft: list[str]
:keyword studio_migration_info:
:paramtype studio_migration_info: ~flow.models.StudioMigrationInfo
:keyword flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
:paramtype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
:keyword pipeline_run_setting_parameters:
:paramtype pipeline_run_setting_parameters: list[~flow.models.RunSettingParameter]
:keyword pipeline_run_settings:
:paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:keyword continue_run_on_step_failure:
:paramtype continue_run_on_step_failure: bool
:keyword continue_run_on_failed_optional_input:
:paramtype continue_run_on_failed_optional_input: bool
:keyword default_compute:
:paramtype default_compute: ~flow.models.ComputeSetting
:keyword default_datastore:
:paramtype default_datastore: ~flow.models.DatastoreSetting
:keyword default_cloud_priority:
:paramtype default_cloud_priority: ~flow.models.CloudPrioritySetting
:keyword enforce_rerun:
:paramtype enforce_rerun: bool
:keyword pipeline_parameters: This is a dictionary.
:paramtype pipeline_parameters: dict[str, str]
:keyword data_path_assignments: This is a dictionary.
:paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:keyword data_set_definition_value_assignments: This is a dictionary.
:paramtype data_set_definition_value_assignments: dict[str,
~flow.models.DataSetDefinitionValue]
:keyword asset_output_settings_assignments: This is a dictionary.
:paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:keyword pipeline_timeout:
:paramtype pipeline_timeout: int
:keyword identity_config:
:paramtype identity_config: ~flow.models.IdentitySetting
:keyword graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
"ContainsDesignerBuildin".
:paramtype graph_components_mode: str or ~flow.models.GraphComponentsMode
:keyword name:
:paramtype name: str
:keyword last_edited_by:
:paramtype last_edited_by: str
:keyword created_by:
:paramtype created_by: str
:keyword description:
:paramtype description: str
:keyword pipeline_type: Possible values include: "TrainingPipeline",
"RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
:paramtype pipeline_type: str or ~flow.models.PipelineType
:keyword pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:paramtype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(PipelineDraft, self).__init__(**kwargs)
self.graph_draft_id = kwargs.get('graph_draft_id', None)
self.source_pipeline_run_id = kwargs.get('source_pipeline_run_id', None)
self.latest_pipeline_run_id = kwargs.get('latest_pipeline_run_id', None)
self.latest_run_experiment_name = kwargs.get('latest_run_experiment_name', None)
self.latest_run_experiment_id = kwargs.get('latest_run_experiment_id', None)
self.is_latest_run_experiment_archived = kwargs.get('is_latest_run_experiment_archived', None)
self.status = kwargs.get('status', None)
self.graph_detail = kwargs.get('graph_detail', None)
self.real_time_endpoint_info = kwargs.get('real_time_endpoint_info', None)
self.linked_pipelines_info = kwargs.get('linked_pipelines_info', None)
self.nodes_in_draft = kwargs.get('nodes_in_draft', None)
self.studio_migration_info = kwargs.get('studio_migration_info', None)
self.flattened_sub_graphs = kwargs.get('flattened_sub_graphs', None)
self.pipeline_run_setting_parameters = kwargs.get('pipeline_run_setting_parameters', None)
self.pipeline_run_settings = kwargs.get('pipeline_run_settings', None)
self.continue_run_on_step_failure = kwargs.get('continue_run_on_step_failure', None)
self.continue_run_on_failed_optional_input = kwargs.get('continue_run_on_failed_optional_input', None)
self.default_compute = kwargs.get('default_compute', None)
self.default_datastore = kwargs.get('default_datastore', None)
self.default_cloud_priority = kwargs.get('default_cloud_priority', None)
self.enforce_rerun = kwargs.get('enforce_rerun', None)
self.pipeline_parameters = kwargs.get('pipeline_parameters', None)
self.data_path_assignments = kwargs.get('data_path_assignments', None)
self.data_set_definition_value_assignments = kwargs.get('data_set_definition_value_assignments', None)
self.asset_output_settings_assignments = kwargs.get('asset_output_settings_assignments', None)
self.pipeline_timeout = kwargs.get('pipeline_timeout', None)
self.identity_config = kwargs.get('identity_config', None)
self.graph_components_mode = kwargs.get('graph_components_mode', None)
self.name = kwargs.get('name', None)
self.last_edited_by = kwargs.get('last_edited_by', None)
self.created_by = kwargs.get('created_by', None)
self.description = kwargs.get('description', None)
self.pipeline_type = kwargs.get('pipeline_type', None)
self.pipeline_draft_mode = kwargs.get('pipeline_draft_mode', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
self.entity_status = kwargs.get('entity_status', None)
self.id = kwargs.get('id', None)
self.etag = kwargs.get('etag', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
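
# Illustrative sketch (hand-written): msrest only populates the attributes
# whose keys appear in the payload; everything else stays None, which is
# convenient given how large PipelineDraft is. Keys follow the attribute map.
def _example_pipeline_draft_from_payload():
    payload = {
        'name': 'my-draft',
        'pipelineType': 'TrainingPipeline',
        'tags': {'team': 'example'},
    }
    return PipelineDraft.deserialize(payload)
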
class PipelineDraftStepDetails(msrest.serialization.Model):
"""PipelineDraftStepDetails.
:ivar run_id:
:vartype run_id: str
:ivar target:
:vartype target: str
:ivar status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:vartype status: str or ~flow.models.RunStatus
:ivar status_detail:
:vartype status_detail: str
:ivar parent_run_id:
:vartype parent_run_id: str
:ivar start_time:
:vartype start_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
:ivar is_reused:
:vartype is_reused: bool
:ivar reused_run_id:
:vartype reused_run_id: str
:ivar reused_pipeline_run_id:
:vartype reused_pipeline_run_id: str
:ivar logs: This is a dictionary.
:vartype logs: dict[str, str]
:ivar output_log:
:vartype output_log: str
:ivar run_configuration:
:vartype run_configuration: ~flow.models.RunConfiguration
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, str]
:ivar port_outputs: This is a dictionary.
:vartype port_outputs: dict[str, ~flow.models.PortOutputInfo]
:ivar is_experiment_archived:
:vartype is_experiment_archived: bool
"""
_attribute_map = {
'run_id': {'key': 'runId', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'status_detail': {'key': 'statusDetail', 'type': 'str'},
'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'is_reused': {'key': 'isReused', 'type': 'bool'},
'reused_run_id': {'key': 'reusedRunId', 'type': 'str'},
'reused_pipeline_run_id': {'key': 'reusedPipelineRunId', 'type': 'str'},
'logs': {'key': 'logs', 'type': '{str}'},
'output_log': {'key': 'outputLog', 'type': 'str'},
'run_configuration': {'key': 'runConfiguration', 'type': 'RunConfiguration'},
'outputs': {'key': 'outputs', 'type': '{str}'},
'port_outputs': {'key': 'portOutputs', 'type': '{PortOutputInfo}'},
'is_experiment_archived': {'key': 'isExperimentArchived', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword run_id:
:paramtype run_id: str
:keyword target:
:paramtype target: str
:keyword status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:paramtype status: str or ~flow.models.RunStatus
:keyword status_detail:
:paramtype status_detail: str
:keyword parent_run_id:
:paramtype parent_run_id: str
:keyword start_time:
:paramtype start_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
:keyword is_reused:
:paramtype is_reused: bool
:keyword reused_run_id:
:paramtype reused_run_id: str
:keyword reused_pipeline_run_id:
:paramtype reused_pipeline_run_id: str
:keyword logs: This is a dictionary.
:paramtype logs: dict[str, str]
:keyword output_log:
:paramtype output_log: str
:keyword run_configuration:
:paramtype run_configuration: ~flow.models.RunConfiguration
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, str]
:keyword port_outputs: This is a dictionary.
:paramtype port_outputs: dict[str, ~flow.models.PortOutputInfo]
:keyword is_experiment_archived:
:paramtype is_experiment_archived: bool
"""
super(PipelineDraftStepDetails, self).__init__(**kwargs)
self.run_id = kwargs.get('run_id', None)
self.target = kwargs.get('target', None)
self.status = kwargs.get('status', None)
self.status_detail = kwargs.get('status_detail', None)
self.parent_run_id = kwargs.get('parent_run_id', None)
self.start_time = kwargs.get('start_time', None)
self.end_time = kwargs.get('end_time', None)
self.is_reused = kwargs.get('is_reused', None)
self.reused_run_id = kwargs.get('reused_run_id', None)
self.reused_pipeline_run_id = kwargs.get('reused_pipeline_run_id', None)
self.logs = kwargs.get('logs', None)
self.output_log = kwargs.get('output_log', None)
self.run_configuration = kwargs.get('run_configuration', None)
self.outputs = kwargs.get('outputs', None)
self.port_outputs = kwargs.get('port_outputs', None)
self.is_experiment_archived = kwargs.get('is_experiment_archived', None)
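
# Illustrative sketch (hand-written): a terminal-state check over the RunStatus
# values documented on PipelineDraftStepDetails.status. Treating exactly these
# three values as terminal is a reading of the enum names, not a documented
# guarantee.
def _example_step_is_terminal(step_details):
    return step_details.status in ('Completed', 'Failed', 'Canceled')
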
class PipelineDraftSummary(msrest.serialization.Model):
"""PipelineDraftSummary.
:ivar name:
:vartype name: str
:ivar last_edited_by:
:vartype last_edited_by: str
:ivar created_by:
:vartype created_by: str
:ivar description:
:vartype description: str
:ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline",
"BatchInferencePipeline", "Unknown".
:vartype pipeline_type: str or ~flow.models.PipelineType
:ivar pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:vartype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'last_edited_by': {'key': 'lastEditedBy', 'type': 'str'},
'created_by': {'key': 'createdBy', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
'pipeline_draft_mode': {'key': 'pipelineDraftMode', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword last_edited_by:
:paramtype last_edited_by: str
:keyword created_by:
:paramtype created_by: str
:keyword description:
:paramtype description: str
:keyword pipeline_type: Possible values include: "TrainingPipeline",
"RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
:paramtype pipeline_type: str or ~flow.models.PipelineType
:keyword pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:paramtype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(PipelineDraftSummary, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.last_edited_by = kwargs.get('last_edited_by', None)
self.created_by = kwargs.get('created_by', None)
self.description = kwargs.get('description', None)
self.pipeline_type = kwargs.get('pipeline_type', None)
self.pipeline_draft_mode = kwargs.get('pipeline_draft_mode', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
self.entity_status = kwargs.get('entity_status', None)
self.id = kwargs.get('id', None)
self.etag = kwargs.get('etag', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
class PipelineEndpoint(msrest.serialization.Model):
"""PipelineEndpoint.
:ivar default_version:
:vartype default_version: str
:ivar default_pipeline_id:
:vartype default_pipeline_id: str
:ivar default_graph_id:
:vartype default_graph_id: str
:ivar rest_endpoint:
:vartype rest_endpoint: str
:ivar published_date:
:vartype published_date: ~datetime.datetime
:ivar published_by:
:vartype published_by: str
:ivar parameters: This is a dictionary.
:vartype parameters: dict[str, str]
:ivar data_set_definition_value_assignment: This is a dictionary.
:vartype data_set_definition_value_assignment: dict[str, ~flow.models.DataSetDefinitionValue]
:ivar default_pipeline_name:
:vartype default_pipeline_name: str
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar updated_by:
:vartype updated_by: str
:ivar swagger_url:
:vartype swagger_url: str
:ivar last_run_time:
:vartype last_run_time: ~datetime.datetime
:ivar last_run_status: Possible values include: "NotStarted", "Running", "Failed", "Finished",
"Canceled", "Queued", "CancelRequested".
:vartype last_run_status: str or ~flow.models.PipelineRunStatusCode
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'default_version': {'key': 'defaultVersion', 'type': 'str'},
'default_pipeline_id': {'key': 'defaultPipelineId', 'type': 'str'},
'default_graph_id': {'key': 'defaultGraphId', 'type': 'str'},
'rest_endpoint': {'key': 'restEndpoint', 'type': 'str'},
'published_date': {'key': 'publishedDate', 'type': 'iso-8601'},
'published_by': {'key': 'publishedBy', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{str}'},
'data_set_definition_value_assignment': {'key': 'dataSetDefinitionValueAssignment', 'type': '{DataSetDefinitionValue}'},
'default_pipeline_name': {'key': 'defaultPipelineName', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'updated_by': {'key': 'updatedBy', 'type': 'str'},
'swagger_url': {'key': 'swaggerUrl', 'type': 'str'},
'last_run_time': {'key': 'lastRunTime', 'type': 'iso-8601'},
'last_run_status': {'key': 'lastRunStatus', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword default_version:
:paramtype default_version: str
:keyword default_pipeline_id:
:paramtype default_pipeline_id: str
:keyword default_graph_id:
:paramtype default_graph_id: str
:keyword rest_endpoint:
:paramtype rest_endpoint: str
:keyword published_date:
:paramtype published_date: ~datetime.datetime
:keyword published_by:
:paramtype published_by: str
:keyword parameters: This is a dictionary.
:paramtype parameters: dict[str, str]
        :keyword data_set_definition_value_assignment: This is a dictionary.
        :paramtype data_set_definition_value_assignment: dict[str,
         ~flow.models.DataSetDefinitionValue]
:keyword default_pipeline_name:
:paramtype default_pipeline_name: str
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword updated_by:
:paramtype updated_by: str
:keyword swagger_url:
:paramtype swagger_url: str
:keyword last_run_time:
:paramtype last_run_time: ~datetime.datetime
:keyword last_run_status: Possible values include: "NotStarted", "Running", "Failed",
"Finished", "Canceled", "Queued", "CancelRequested".
:paramtype last_run_status: str or ~flow.models.PipelineRunStatusCode
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(PipelineEndpoint, self).__init__(**kwargs)
self.default_version = kwargs.get('default_version', None)
self.default_pipeline_id = kwargs.get('default_pipeline_id', None)
self.default_graph_id = kwargs.get('default_graph_id', None)
self.rest_endpoint = kwargs.get('rest_endpoint', None)
self.published_date = kwargs.get('published_date', None)
self.published_by = kwargs.get('published_by', None)
self.parameters = kwargs.get('parameters', None)
self.data_set_definition_value_assignment = kwargs.get('data_set_definition_value_assignment', None)
self.default_pipeline_name = kwargs.get('default_pipeline_name', None)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.updated_by = kwargs.get('updated_by', None)
self.swagger_url = kwargs.get('swagger_url', None)
self.last_run_time = kwargs.get('last_run_time', None)
self.last_run_status = kwargs.get('last_run_status', None)
self.tags = kwargs.get('tags', None)
self.entity_status = kwargs.get('entity_status', None)
self.id = kwargs.get('id', None)
self.etag = kwargs.get('etag', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
class PipelineEndpointSummary(msrest.serialization.Model):
"""PipelineEndpointSummary.
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar updated_by:
:vartype updated_by: str
:ivar swagger_url:
:vartype swagger_url: str
:ivar last_run_time:
:vartype last_run_time: ~datetime.datetime
:ivar last_run_status: Possible values include: "NotStarted", "Running", "Failed", "Finished",
"Canceled", "Queued", "CancelRequested".
:vartype last_run_status: str or ~flow.models.PipelineRunStatusCode
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'updated_by': {'key': 'updatedBy', 'type': 'str'},
'swagger_url': {'key': 'swaggerUrl', 'type': 'str'},
'last_run_time': {'key': 'lastRunTime', 'type': 'iso-8601'},
'last_run_status': {'key': 'lastRunStatus', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}

    def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword updated_by:
:paramtype updated_by: str
:keyword swagger_url:
:paramtype swagger_url: str
:keyword last_run_time:
:paramtype last_run_time: ~datetime.datetime
:keyword last_run_status: Possible values include: "NotStarted", "Running", "Failed",
"Finished", "Canceled", "Queued", "CancelRequested".
:paramtype last_run_status: str or ~flow.models.PipelineRunStatusCode
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(PipelineEndpointSummary, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.updated_by = kwargs.get('updated_by', None)
self.swagger_url = kwargs.get('swagger_url', None)
self.last_run_time = kwargs.get('last_run_time', None)
self.last_run_status = kwargs.get('last_run_status', None)
self.tags = kwargs.get('tags', None)
self.entity_status = kwargs.get('entity_status', None)
self.id = kwargs.get('id', None)
self.etag = kwargs.get('etag', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
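

# --- Usage sketch (editorial addition, not AutoRest output) -----------------
# A minimal example of constructing a PipelineEndpointSummary and serializing
# it to its wire shape. All values are illustrative placeholders; serialize()
# maps snake_case attributes to the camelCase keys in _attribute_map
# (e.g. entity_status -> entityStatus).
def _example_pipeline_endpoint_summary():  # pragma: no cover
    summary = PipelineEndpointSummary(
        name="demo-endpoint",
        description="Example endpoint summary",
        entity_status="Active",  # enum-typed fields accept plain strings
        tags={"team": "flow"},
    )
    return summary.serialize()  # {'name': 'demo-endpoint', 'entityStatus': 'Active', ...}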


class PipelineGraph(msrest.serialization.Model):
"""PipelineGraph.

    :ivar graph_module_dtos:
:vartype graph_module_dtos: list[~flow.models.ModuleDto]
:ivar graph_data_sources:
:vartype graph_data_sources: list[~flow.models.DataInfo]
:ivar graphs: This is a dictionary.
:vartype graphs: dict[str, ~flow.models.PipelineGraph]
:ivar graph_drafts: This is a dictionary.
:vartype graph_drafts: dict[str, ~flow.models.PipelineGraph]
:ivar module_node_run_settings:
:vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:ivar module_node_ui_input_settings:
:vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:ivar sub_pipelines_info:
:vartype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:ivar referenced_node_id:
:vartype referenced_node_id: str
:ivar pipeline_run_setting_parameters:
:vartype pipeline_run_setting_parameters: list[~flow.models.RunSettingParameter]
:ivar pipeline_run_settings:
:vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:ivar real_time_endpoint_info:
:vartype real_time_endpoint_info: ~flow.models.RealTimeEndpointInfo
:ivar node_telemetry_meta_infos:
:vartype node_telemetry_meta_infos: list[~flow.models.NodeTelemetryMetaInfo]
:ivar graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
"ContainsDesignerBuildin".
:vartype graph_components_mode: str or ~flow.models.GraphComponentsMode
:ivar module_nodes:
:vartype module_nodes: list[~flow.models.GraphModuleNode]
:ivar dataset_nodes:
:vartype dataset_nodes: list[~flow.models.GraphDatasetNode]
:ivar sub_graph_nodes:
:vartype sub_graph_nodes: list[~flow.models.GraphReferenceNode]
:ivar control_reference_nodes:
:vartype control_reference_nodes: list[~flow.models.GraphControlReferenceNode]
:ivar control_nodes:
:vartype control_nodes: list[~flow.models.GraphControlNode]
:ivar edges:
:vartype edges: list[~flow.models.GraphEdge]
:ivar entity_interface:
:vartype entity_interface: ~flow.models.EntityInterface
:ivar graph_layout:
:vartype graph_layout: ~flow.models.GraphLayout
:ivar created_by:
:vartype created_by: ~flow.models.CreatedBy
:ivar last_updated_by:
:vartype last_updated_by: ~flow.models.CreatedBy
:ivar default_compute:
:vartype default_compute: ~flow.models.ComputeSetting
:ivar default_datastore:
:vartype default_datastore: ~flow.models.DatastoreSetting
:ivar default_cloud_priority:
:vartype default_cloud_priority: ~flow.models.CloudPrioritySetting
:ivar extended_properties: This is a dictionary.
:vartype extended_properties: dict[str, str]
:ivar parent_sub_graph_module_ids:
:vartype parent_sub_graph_module_ids: list[str]
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""

    _attribute_map = {
'graph_module_dtos': {'key': 'graphModuleDtos', 'type': '[ModuleDto]'},
'graph_data_sources': {'key': 'graphDataSources', 'type': '[DataInfo]'},
'graphs': {'key': 'graphs', 'type': '{PipelineGraph}'},
'graph_drafts': {'key': 'graphDrafts', 'type': '{PipelineGraph}'},
'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'},
'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'},
'sub_pipelines_info': {'key': 'subPipelinesInfo', 'type': 'SubPipelinesInfo'},
'referenced_node_id': {'key': 'referencedNodeId', 'type': 'str'},
'pipeline_run_setting_parameters': {'key': 'pipelineRunSettingParameters', 'type': '[RunSettingParameter]'},
'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
'real_time_endpoint_info': {'key': 'realTimeEndpointInfo', 'type': 'RealTimeEndpointInfo'},
'node_telemetry_meta_infos': {'key': 'nodeTelemetryMetaInfos', 'type': '[NodeTelemetryMetaInfo]'},
'graph_components_mode': {'key': 'graphComponentsMode', 'type': 'str'},
'module_nodes': {'key': 'moduleNodes', 'type': '[GraphModuleNode]'},
'dataset_nodes': {'key': 'datasetNodes', 'type': '[GraphDatasetNode]'},
'sub_graph_nodes': {'key': 'subGraphNodes', 'type': '[GraphReferenceNode]'},
'control_reference_nodes': {'key': 'controlReferenceNodes', 'type': '[GraphControlReferenceNode]'},
'control_nodes': {'key': 'controlNodes', 'type': '[GraphControlNode]'},
'edges': {'key': 'edges', 'type': '[GraphEdge]'},
'entity_interface': {'key': 'entityInterface', 'type': 'EntityInterface'},
'graph_layout': {'key': 'graphLayout', 'type': 'GraphLayout'},
'created_by': {'key': 'createdBy', 'type': 'CreatedBy'},
'last_updated_by': {'key': 'lastUpdatedBy', 'type': 'CreatedBy'},
'default_compute': {'key': 'defaultCompute', 'type': 'ComputeSetting'},
'default_datastore': {'key': 'defaultDatastore', 'type': 'DatastoreSetting'},
'default_cloud_priority': {'key': 'defaultCloudPriority', 'type': 'CloudPrioritySetting'},
'extended_properties': {'key': 'extendedProperties', 'type': '{str}'},
'parent_sub_graph_module_ids': {'key': 'parentSubGraphModuleIds', 'type': '[str]'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}

    def __init__(
self,
**kwargs
):
"""
:keyword graph_module_dtos:
:paramtype graph_module_dtos: list[~flow.models.ModuleDto]
:keyword graph_data_sources:
:paramtype graph_data_sources: list[~flow.models.DataInfo]
:keyword graphs: This is a dictionary.
:paramtype graphs: dict[str, ~flow.models.PipelineGraph]
:keyword graph_drafts: This is a dictionary.
:paramtype graph_drafts: dict[str, ~flow.models.PipelineGraph]
:keyword module_node_run_settings:
:paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:keyword module_node_ui_input_settings:
:paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:keyword sub_pipelines_info:
:paramtype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:keyword referenced_node_id:
:paramtype referenced_node_id: str
:keyword pipeline_run_setting_parameters:
:paramtype pipeline_run_setting_parameters: list[~flow.models.RunSettingParameter]
:keyword pipeline_run_settings:
:paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:keyword real_time_endpoint_info:
:paramtype real_time_endpoint_info: ~flow.models.RealTimeEndpointInfo
:keyword node_telemetry_meta_infos:
:paramtype node_telemetry_meta_infos: list[~flow.models.NodeTelemetryMetaInfo]
:keyword graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
"ContainsDesignerBuildin".
:paramtype graph_components_mode: str or ~flow.models.GraphComponentsMode
:keyword module_nodes:
:paramtype module_nodes: list[~flow.models.GraphModuleNode]
:keyword dataset_nodes:
:paramtype dataset_nodes: list[~flow.models.GraphDatasetNode]
:keyword sub_graph_nodes:
:paramtype sub_graph_nodes: list[~flow.models.GraphReferenceNode]
:keyword control_reference_nodes:
:paramtype control_reference_nodes: list[~flow.models.GraphControlReferenceNode]
:keyword control_nodes:
:paramtype control_nodes: list[~flow.models.GraphControlNode]
:keyword edges:
:paramtype edges: list[~flow.models.GraphEdge]
:keyword entity_interface:
:paramtype entity_interface: ~flow.models.EntityInterface
:keyword graph_layout:
:paramtype graph_layout: ~flow.models.GraphLayout
:keyword created_by:
:paramtype created_by: ~flow.models.CreatedBy
:keyword last_updated_by:
:paramtype last_updated_by: ~flow.models.CreatedBy
:keyword default_compute:
:paramtype default_compute: ~flow.models.ComputeSetting
:keyword default_datastore:
:paramtype default_datastore: ~flow.models.DatastoreSetting
:keyword default_cloud_priority:
:paramtype default_cloud_priority: ~flow.models.CloudPrioritySetting
:keyword extended_properties: This is a dictionary.
:paramtype extended_properties: dict[str, str]
:keyword parent_sub_graph_module_ids:
:paramtype parent_sub_graph_module_ids: list[str]
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(PipelineGraph, self).__init__(**kwargs)
self.graph_module_dtos = kwargs.get('graph_module_dtos', None)
self.graph_data_sources = kwargs.get('graph_data_sources', None)
self.graphs = kwargs.get('graphs', None)
self.graph_drafts = kwargs.get('graph_drafts', None)
self.module_node_run_settings = kwargs.get('module_node_run_settings', None)
self.module_node_ui_input_settings = kwargs.get('module_node_ui_input_settings', None)
self.sub_pipelines_info = kwargs.get('sub_pipelines_info', None)
self.referenced_node_id = kwargs.get('referenced_node_id', None)
self.pipeline_run_setting_parameters = kwargs.get('pipeline_run_setting_parameters', None)
self.pipeline_run_settings = kwargs.get('pipeline_run_settings', None)
self.real_time_endpoint_info = kwargs.get('real_time_endpoint_info', None)
self.node_telemetry_meta_infos = kwargs.get('node_telemetry_meta_infos', None)
self.graph_components_mode = kwargs.get('graph_components_mode', None)
self.module_nodes = kwargs.get('module_nodes', None)
self.dataset_nodes = kwargs.get('dataset_nodes', None)
self.sub_graph_nodes = kwargs.get('sub_graph_nodes', None)
self.control_reference_nodes = kwargs.get('control_reference_nodes', None)
self.control_nodes = kwargs.get('control_nodes', None)
self.edges = kwargs.get('edges', None)
self.entity_interface = kwargs.get('entity_interface', None)
self.graph_layout = kwargs.get('graph_layout', None)
self.created_by = kwargs.get('created_by', None)
self.last_updated_by = kwargs.get('last_updated_by', None)
self.default_compute = kwargs.get('default_compute', None)
self.default_datastore = kwargs.get('default_datastore', None)
self.default_cloud_priority = kwargs.get('default_cloud_priority', None)
self.extended_properties = kwargs.get('extended_properties', None)
self.parent_sub_graph_module_ids = kwargs.get('parent_sub_graph_module_ids', None)
self.id = kwargs.get('id', None)
self.etag = kwargs.get('etag', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
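

# --- Usage sketch (editorial addition, not AutoRest output) -----------------
# PipelineGraph is self-referencing: 'graphs' and 'graph_drafts' are typed
# '{PipelineGraph}' in _attribute_map, i.e. dictionaries of further
# PipelineGraph instances, so serialization recurses through sub-graphs.
# The dictionary key below is an assumption for illustration only.
def _example_nested_pipeline_graph():  # pragma: no cover
    child = PipelineGraph(id="child-graph", referenced_node_id="node-1")
    parent = PipelineGraph(
        id="parent-graph",
        graphs={"node-1": child},  # hypothetical key; the service defines real keys
    )
    return parent.serialize()  # child graph is emitted under the 'graphs' wire key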


class PipelineInput(msrest.serialization.Model):
"""PipelineInput.

    :ivar data:
:vartype data: ~flow.models.InputData
"""

    _attribute_map = {
'data': {'key': 'data', 'type': 'InputData'},
}

    def __init__(
self,
**kwargs
):
"""
:keyword data:
:paramtype data: ~flow.models.InputData
"""
super(PipelineInput, self).__init__(**kwargs)
self.data = kwargs.get('data', None)


class PipelineJob(msrest.serialization.Model):
"""PipelineJob.

    :ivar job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data",
"AutoML", "Spark", "Base".
:vartype job_type: str or ~flow.models.JobType
:ivar pipeline_job_type: The only acceptable values to pass in are None and "AzureML". The
default value is None.
:vartype pipeline_job_type: str
:ivar pipeline:
:vartype pipeline: ~flow.models.Pipeline
:ivar compute_id:
:vartype compute_id: str
:ivar run_id:
:vartype run_id: str
:ivar settings: Anything.
:vartype settings: any
:ivar component_jobs: This is a dictionary.
:vartype component_jobs: dict[str, ~flow.models.MfeInternalV20211001ComponentJob]
:ivar inputs: This is a dictionary.
:vartype inputs: dict[str, ~flow.models.JobInput]
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, ~flow.models.JobOutput]
:ivar bindings:
:vartype bindings: list[~flow.models.Binding]
:ivar jobs: This is a dictionary.
:vartype jobs: dict[str, any]
:ivar input_bindings: This is a dictionary.
:vartype input_bindings: dict[str, ~flow.models.InputDataBinding]
:ivar output_bindings: This is a dictionary.
:vartype output_bindings: dict[str, ~flow.models.OutputDataBinding]
:ivar source_job_id:
:vartype source_job_id: str
:ivar provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
"InProgress".
:vartype provisioning_state: str or ~flow.models.JobProvisioningState
:ivar parent_job_name:
:vartype parent_job_name: str
:ivar display_name:
:vartype display_name: str
:ivar experiment_name:
:vartype experiment_name: str
:ivar status: Possible values include: "NotStarted", "Starting", "Provisioning", "Preparing",
"Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled",
"NotResponding", "Paused", "Unknown", "Scheduled".
:vartype status: str or ~flow.models.JobStatus
:ivar interaction_endpoints: Dictionary of :code:`<JobEndpoint>`.
:vartype interaction_endpoints: dict[str, ~flow.models.JobEndpoint]
:ivar identity:
:vartype identity: ~flow.models.MfeInternalIdentityConfiguration
:ivar compute:
:vartype compute: ~flow.models.ComputeConfiguration
:ivar priority:
:vartype priority: int
:ivar output:
:vartype output: ~flow.models.JobOutputArtifacts
:ivar is_archived:
:vartype is_archived: bool
:ivar schedule:
:vartype schedule: ~flow.models.ScheduleBase
:ivar component_id:
:vartype component_id: str
:ivar notification_setting:
:vartype notification_setting: ~flow.models.NotificationSetting
:ivar secrets_configuration: Dictionary of :code:`<MfeInternalSecretConfiguration>`.
:vartype secrets_configuration: dict[str, ~flow.models.MfeInternalSecretConfiguration]
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
"""

    _attribute_map = {
'job_type': {'key': 'jobType', 'type': 'str'},
'pipeline_job_type': {'key': 'pipelineJobType', 'type': 'str'},
'pipeline': {'key': 'pipeline', 'type': 'Pipeline'},
'compute_id': {'key': 'computeId', 'type': 'str'},
'run_id': {'key': 'runId', 'type': 'str'},
'settings': {'key': 'settings', 'type': 'object'},
'component_jobs': {'key': 'componentJobs', 'type': '{MfeInternalV20211001ComponentJob}'},
'inputs': {'key': 'inputs', 'type': '{JobInput}'},
'outputs': {'key': 'outputs', 'type': '{JobOutput}'},
'bindings': {'key': 'bindings', 'type': '[Binding]'},
'jobs': {'key': 'jobs', 'type': '{object}'},
'input_bindings': {'key': 'inputBindings', 'type': '{InputDataBinding}'},
'output_bindings': {'key': 'outputBindings', 'type': '{OutputDataBinding}'},
'source_job_id': {'key': 'sourceJobId', 'type': 'str'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'parent_job_name': {'key': 'parentJobName', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'},
'identity': {'key': 'identity', 'type': 'MfeInternalIdentityConfiguration'},
'compute': {'key': 'compute', 'type': 'ComputeConfiguration'},
'priority': {'key': 'priority', 'type': 'int'},
'output': {'key': 'output', 'type': 'JobOutputArtifacts'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
'schedule': {'key': 'schedule', 'type': 'ScheduleBase'},
'component_id': {'key': 'componentId', 'type': 'str'},
'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'},
'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{MfeInternalSecretConfiguration}'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
}

    def __init__(
self,
**kwargs
):
"""
:keyword job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data",
"AutoML", "Spark", "Base".
:paramtype job_type: str or ~flow.models.JobType
:keyword pipeline_job_type: The only acceptable values to pass in are None and "AzureML". The
default value is None.
:paramtype pipeline_job_type: str
:keyword pipeline:
:paramtype pipeline: ~flow.models.Pipeline
:keyword compute_id:
:paramtype compute_id: str
:keyword run_id:
:paramtype run_id: str
:keyword settings: Anything.
:paramtype settings: any
:keyword component_jobs: This is a dictionary.
:paramtype component_jobs: dict[str, ~flow.models.MfeInternalV20211001ComponentJob]
:keyword inputs: This is a dictionary.
:paramtype inputs: dict[str, ~flow.models.JobInput]
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, ~flow.models.JobOutput]
:keyword bindings:
:paramtype bindings: list[~flow.models.Binding]
:keyword jobs: This is a dictionary.
:paramtype jobs: dict[str, any]
:keyword input_bindings: This is a dictionary.
:paramtype input_bindings: dict[str, ~flow.models.InputDataBinding]
:keyword output_bindings: This is a dictionary.
:paramtype output_bindings: dict[str, ~flow.models.OutputDataBinding]
:keyword source_job_id:
:paramtype source_job_id: str
:keyword provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
"InProgress".
:paramtype provisioning_state: str or ~flow.models.JobProvisioningState
:keyword parent_job_name:
:paramtype parent_job_name: str
:keyword display_name:
:paramtype display_name: str
:keyword experiment_name:
:paramtype experiment_name: str
:keyword status: Possible values include: "NotStarted", "Starting", "Provisioning",
"Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed",
"Canceled", "NotResponding", "Paused", "Unknown", "Scheduled".
:paramtype status: str or ~flow.models.JobStatus
:keyword interaction_endpoints: Dictionary of :code:`<JobEndpoint>`.
:paramtype interaction_endpoints: dict[str, ~flow.models.JobEndpoint]
:keyword identity:
:paramtype identity: ~flow.models.MfeInternalIdentityConfiguration
:keyword compute:
:paramtype compute: ~flow.models.ComputeConfiguration
:keyword priority:
:paramtype priority: int
:keyword output:
:paramtype output: ~flow.models.JobOutputArtifacts
:keyword is_archived:
:paramtype is_archived: bool
:keyword schedule:
:paramtype schedule: ~flow.models.ScheduleBase
:keyword component_id:
:paramtype component_id: str
:keyword notification_setting:
:paramtype notification_setting: ~flow.models.NotificationSetting
:keyword secrets_configuration: Dictionary of :code:`<MfeInternalSecretConfiguration>`.
:paramtype secrets_configuration: dict[str, ~flow.models.MfeInternalSecretConfiguration]
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
"""
super(PipelineJob, self).__init__(**kwargs)
self.job_type = kwargs.get('job_type', None)
self.pipeline_job_type = kwargs.get('pipeline_job_type', None)
self.pipeline = kwargs.get('pipeline', None)
self.compute_id = kwargs.get('compute_id', None)
self.run_id = kwargs.get('run_id', None)
self.settings = kwargs.get('settings', None)
self.component_jobs = kwargs.get('component_jobs', None)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
self.bindings = kwargs.get('bindings', None)
self.jobs = kwargs.get('jobs', None)
self.input_bindings = kwargs.get('input_bindings', None)
self.output_bindings = kwargs.get('output_bindings', None)
self.source_job_id = kwargs.get('source_job_id', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
self.parent_job_name = kwargs.get('parent_job_name', None)
self.display_name = kwargs.get('display_name', None)
self.experiment_name = kwargs.get('experiment_name', None)
self.status = kwargs.get('status', None)
self.interaction_endpoints = kwargs.get('interaction_endpoints', None)
self.identity = kwargs.get('identity', None)
self.compute = kwargs.get('compute', None)
self.priority = kwargs.get('priority', None)
self.output = kwargs.get('output', None)
self.is_archived = kwargs.get('is_archived', None)
self.schedule = kwargs.get('schedule', None)
self.component_id = kwargs.get('component_id', None)
self.notification_setting = kwargs.get('notification_setting', None)
self.secrets_configuration = kwargs.get('secrets_configuration', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
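

# --- Usage sketch (editorial addition, not AutoRest output) -----------------
# Enum-typed fields such as job_type and status are declared as 'str' in
# _attribute_map, so plain strings (or ~flow.models enum members) may be
# passed. as_dict() keeps the snake_case attribute names, while serialize()
# produces the camelCase wire keys. Values are placeholders.
def _example_pipeline_job():  # pragma: no cover
    job = PipelineJob(
        job_type="Pipeline",
        status="Running",
        experiment_name="demo-experiment",
        is_archived=False,
    )
    local = job.as_dict()   # {'job_type': 'Pipeline', ...}
    wire = job.serialize()  # {'jobType': 'Pipeline', ...}
    return local, wire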


class PipelineJobRuntimeBasicSettings(msrest.serialization.Model):
"""PipelineJobRuntimeBasicSettings.

    :ivar pipeline_run_settings:
:vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:ivar experiment_name:
:vartype experiment_name: str
:ivar pipeline_job_name:
:vartype pipeline_job_name: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar display_name:
:vartype display_name: str
:ivar description:
:vartype description: str
:ivar trigger_time_string:
:vartype trigger_time_string: str
:ivar pipeline_parameters: This is a dictionary.
:vartype pipeline_parameters: dict[str, str]
:ivar data_path_assignments: This is a dictionary.
:vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:ivar data_set_definition_value_assignments: This is a dictionary.
:vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue]
:ivar asset_output_settings_assignments: This is a dictionary.
:vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
"""

    _attribute_map = {
'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'pipeline_job_name': {'key': 'pipelineJobName', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'display_name': {'key': 'displayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'trigger_time_string': {'key': 'triggerTimeString', 'type': 'str'},
'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'},
'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'},
'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
}

    def __init__(
self,
**kwargs
):
"""
:keyword pipeline_run_settings:
:paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:keyword experiment_name:
:paramtype experiment_name: str
:keyword pipeline_job_name:
:paramtype pipeline_job_name: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword display_name:
:paramtype display_name: str
:keyword description:
:paramtype description: str
:keyword trigger_time_string:
:paramtype trigger_time_string: str
:keyword pipeline_parameters: This is a dictionary.
:paramtype pipeline_parameters: dict[str, str]
:keyword data_path_assignments: This is a dictionary.
:paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:keyword data_set_definition_value_assignments: This is a dictionary.
:paramtype data_set_definition_value_assignments: dict[str,
~flow.models.DataSetDefinitionValue]
:keyword asset_output_settings_assignments: This is a dictionary.
:paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
"""
super(PipelineJobRuntimeBasicSettings, self).__init__(**kwargs)
self.pipeline_run_settings = kwargs.get('pipeline_run_settings', None)
self.experiment_name = kwargs.get('experiment_name', None)
self.pipeline_job_name = kwargs.get('pipeline_job_name', None)
self.tags = kwargs.get('tags', None)
self.display_name = kwargs.get('display_name', None)
self.description = kwargs.get('description', None)
self.trigger_time_string = kwargs.get('trigger_time_string', None)
self.pipeline_parameters = kwargs.get('pipeline_parameters', None)
self.data_path_assignments = kwargs.get('data_path_assignments', None)
self.data_set_definition_value_assignments = kwargs.get('data_set_definition_value_assignments', None)
self.asset_output_settings_assignments = kwargs.get('asset_output_settings_assignments', None)


class PipelineJobScheduleDto(msrest.serialization.Model):
"""PipelineJobScheduleDto.

    :ivar system_data:
:vartype system_data: ~flow.models.SystemData
:ivar name:
:vartype name: str
:ivar pipeline_job_name:
:vartype pipeline_job_name: str
:ivar pipeline_job_runtime_settings:
:vartype pipeline_job_runtime_settings: ~flow.models.PipelineJobRuntimeBasicSettings
:ivar display_name:
:vartype display_name: str
:ivar trigger_type: Possible values include: "Recurrence", "Cron".
:vartype trigger_type: str or ~flow.models.TriggerType
:ivar recurrence:
:vartype recurrence: ~flow.models.Recurrence
:ivar cron:
:vartype cron: ~flow.models.Cron
:ivar status: Possible values include: "Enabled", "Disabled".
:vartype status: str or ~flow.models.ScheduleStatus
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
"""

    _attribute_map = {
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'name': {'key': 'name', 'type': 'str'},
'pipeline_job_name': {'key': 'pipelineJobName', 'type': 'str'},
'pipeline_job_runtime_settings': {'key': 'pipelineJobRuntimeSettings', 'type': 'PipelineJobRuntimeBasicSettings'},
'display_name': {'key': 'displayName', 'type': 'str'},
'trigger_type': {'key': 'triggerType', 'type': 'str'},
'recurrence': {'key': 'recurrence', 'type': 'Recurrence'},
'cron': {'key': 'cron', 'type': 'Cron'},
'status': {'key': 'status', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
}

    def __init__(
self,
**kwargs
):
"""
:keyword system_data:
:paramtype system_data: ~flow.models.SystemData
:keyword name:
:paramtype name: str
:keyword pipeline_job_name:
:paramtype pipeline_job_name: str
:keyword pipeline_job_runtime_settings:
:paramtype pipeline_job_runtime_settings: ~flow.models.PipelineJobRuntimeBasicSettings
:keyword display_name:
:paramtype display_name: str
:keyword trigger_type: Possible values include: "Recurrence", "Cron".
:paramtype trigger_type: str or ~flow.models.TriggerType
:keyword recurrence:
:paramtype recurrence: ~flow.models.Recurrence
:keyword cron:
:paramtype cron: ~flow.models.Cron
:keyword status: Possible values include: "Enabled", "Disabled".
:paramtype status: str or ~flow.models.ScheduleStatus
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
"""
super(PipelineJobScheduleDto, self).__init__(**kwargs)
self.system_data = kwargs.get('system_data', None)
self.name = kwargs.get('name', None)
self.pipeline_job_name = kwargs.get('pipeline_job_name', None)
self.pipeline_job_runtime_settings = kwargs.get('pipeline_job_runtime_settings', None)
self.display_name = kwargs.get('display_name', None)
self.trigger_type = kwargs.get('trigger_type', None)
self.recurrence = kwargs.get('recurrence', None)
self.cron = kwargs.get('cron', None)
self.status = kwargs.get('status', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
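

# --- Usage sketch (editorial addition, not AutoRest output) -----------------
# Model-typed fields compose other models from this module. Here a
# PipelineJobRuntimeBasicSettings instance is embedded in a schedule DTO;
# the values are placeholders, and the Cron/Recurrence models are omitted.
def _example_pipeline_job_schedule():  # pragma: no cover
    runtime = PipelineJobRuntimeBasicSettings(
        experiment_name="demo-experiment",
        pipeline_parameters={"learning_rate": "0.01"},
    )
    schedule = PipelineJobScheduleDto(
        name="nightly",
        trigger_type="Cron",  # see ~flow.models.TriggerType
        status="Enabled",
        pipeline_job_runtime_settings=runtime,
    )
    return schedule.serialize()  # nested model appears under 'pipelineJobRuntimeSettings'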


class PipelineOutput(msrest.serialization.Model):
"""PipelineOutput.

    :ivar data:
:vartype data: ~flow.models.MfeInternalOutputData
"""

    _attribute_map = {
'data': {'key': 'data', 'type': 'MfeInternalOutputData'},
}

    def __init__(
self,
**kwargs
):
"""
:keyword data:
:paramtype data: ~flow.models.MfeInternalOutputData
"""
super(PipelineOutput, self).__init__(**kwargs)
self.data = kwargs.get('data', None)


class PipelineRun(msrest.serialization.Model):
"""PipelineRun.

    :ivar pipeline_id:
:vartype pipeline_id: str
:ivar run_source:
:vartype run_source: str
:ivar run_type: Possible values include: "HTTP", "SDK", "Schedule", "Portal".
:vartype run_type: str or ~flow.models.RunType
:ivar parameters: This is a dictionary.
:vartype parameters: dict[str, str]
:ivar data_path_assignments: This is a dictionary.
:vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:ivar data_set_definition_value_assignment: This is a dictionary.
:vartype data_set_definition_value_assignment: dict[str, ~flow.models.DataSetDefinitionValue]
:ivar asset_output_settings_assignments: This is a dictionary.
:vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:ivar total_steps:
:vartype total_steps: int
:ivar logs: This is a dictionary.
:vartype logs: dict[str, str]
:ivar user_alias:
:vartype user_alias: str
:ivar enforce_rerun:
:vartype enforce_rerun: bool
:ivar continue_run_on_failed_optional_input:
:vartype continue_run_on_failed_optional_input: bool
:ivar default_compute:
:vartype default_compute: ~flow.models.ComputeSetting
:ivar default_datastore:
:vartype default_datastore: ~flow.models.DatastoreSetting
:ivar default_cloud_priority:
:vartype default_cloud_priority: ~flow.models.CloudPrioritySetting
:ivar pipeline_timeout_seconds:
:vartype pipeline_timeout_seconds: int
:ivar continue_run_on_step_failure:
:vartype continue_run_on_step_failure: bool
:ivar identity_config:
:vartype identity_config: ~flow.models.IdentitySetting
:ivar description:
:vartype description: str
:ivar display_name:
:vartype display_name: str
:ivar run_number:
:vartype run_number: int
:ivar status_code: Possible values include: "NotStarted", "InDraft", "Preparing", "Running",
"Failed", "Finished", "Canceled", "Throttled", "Unknown".
:vartype status_code: str or ~flow.models.PipelineStatusCode
:ivar run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:vartype run_status: str or ~flow.models.RunStatus
:ivar status_detail:
:vartype status_detail: str
:ivar start_time:
:vartype start_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
:ivar graph_id:
:vartype graph_id: str
:ivar experiment_id:
:vartype experiment_id: str
:ivar experiment_name:
:vartype experiment_name: str
:ivar is_experiment_archived:
:vartype is_experiment_archived: bool
:ivar submitted_by:
:vartype submitted_by: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar step_tags: This is a dictionary.
:vartype step_tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar aether_start_time:
:vartype aether_start_time: ~datetime.datetime
:ivar aether_end_time:
:vartype aether_end_time: ~datetime.datetime
:ivar run_history_start_time:
:vartype run_history_start_time: ~datetime.datetime
:ivar run_history_end_time:
:vartype run_history_end_time: ~datetime.datetime
:ivar unique_child_run_compute_targets:
:vartype unique_child_run_compute_targets: list[str]
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""

    _validation = {
'unique_child_run_compute_targets': {'unique': True},
}

    _attribute_map = {
'pipeline_id': {'key': 'pipelineId', 'type': 'str'},
'run_source': {'key': 'runSource', 'type': 'str'},
'run_type': {'key': 'runType', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{str}'},
'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
'data_set_definition_value_assignment': {'key': 'dataSetDefinitionValueAssignment', 'type': '{DataSetDefinitionValue}'},
'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
'total_steps': {'key': 'totalSteps', 'type': 'int'},
'logs': {'key': 'logs', 'type': '{str}'},
'user_alias': {'key': 'userAlias', 'type': 'str'},
'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
'continue_run_on_failed_optional_input': {'key': 'continueRunOnFailedOptionalInput', 'type': 'bool'},
'default_compute': {'key': 'defaultCompute', 'type': 'ComputeSetting'},
'default_datastore': {'key': 'defaultDatastore', 'type': 'DatastoreSetting'},
'default_cloud_priority': {'key': 'defaultCloudPriority', 'type': 'CloudPrioritySetting'},
'pipeline_timeout_seconds': {'key': 'pipelineTimeoutSeconds', 'type': 'int'},
'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
'identity_config': {'key': 'identityConfig', 'type': 'IdentitySetting'},
'description': {'key': 'description', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'run_number': {'key': 'runNumber', 'type': 'int'},
'status_code': {'key': 'statusCode', 'type': 'str'},
'run_status': {'key': 'runStatus', 'type': 'str'},
'status_detail': {'key': 'statusDetail', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'graph_id': {'key': 'graphId', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'is_experiment_archived': {'key': 'isExperimentArchived', 'type': 'bool'},
'submitted_by': {'key': 'submittedBy', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'step_tags': {'key': 'stepTags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'aether_start_time': {'key': 'aetherStartTime', 'type': 'iso-8601'},
'aether_end_time': {'key': 'aetherEndTime', 'type': 'iso-8601'},
'run_history_start_time': {'key': 'runHistoryStartTime', 'type': 'iso-8601'},
'run_history_end_time': {'key': 'runHistoryEndTime', 'type': 'iso-8601'},
'unique_child_run_compute_targets': {'key': 'uniqueChildRunComputeTargets', 'type': '[str]'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}

    def __init__(
self,
**kwargs
):
"""
:keyword pipeline_id:
:paramtype pipeline_id: str
:keyword run_source:
:paramtype run_source: str
:keyword run_type: Possible values include: "HTTP", "SDK", "Schedule", "Portal".
:paramtype run_type: str or ~flow.models.RunType
:keyword parameters: This is a dictionary.
:paramtype parameters: dict[str, str]
:keyword data_path_assignments: This is a dictionary.
:paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:keyword data_set_definition_value_assignment: This is a dictionary.
:paramtype data_set_definition_value_assignment: dict[str, ~flow.models.DataSetDefinitionValue]
:keyword asset_output_settings_assignments: This is a dictionary.
:paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:keyword total_steps:
:paramtype total_steps: int
:keyword logs: This is a dictionary.
:paramtype logs: dict[str, str]
:keyword user_alias:
:paramtype user_alias: str
:keyword enforce_rerun:
:paramtype enforce_rerun: bool
:keyword continue_run_on_failed_optional_input:
:paramtype continue_run_on_failed_optional_input: bool
:keyword default_compute:
:paramtype default_compute: ~flow.models.ComputeSetting
:keyword default_datastore:
:paramtype default_datastore: ~flow.models.DatastoreSetting
:keyword default_cloud_priority:
:paramtype default_cloud_priority: ~flow.models.CloudPrioritySetting
:keyword pipeline_timeout_seconds:
:paramtype pipeline_timeout_seconds: int
:keyword continue_run_on_step_failure:
:paramtype continue_run_on_step_failure: bool
:keyword identity_config:
:paramtype identity_config: ~flow.models.IdentitySetting
:keyword description:
:paramtype description: str
:keyword display_name:
:paramtype display_name: str
:keyword run_number:
:paramtype run_number: int
:keyword status_code: Possible values include: "NotStarted", "InDraft", "Preparing", "Running",
"Failed", "Finished", "Canceled", "Throttled", "Unknown".
:paramtype status_code: str or ~flow.models.PipelineStatusCode
:keyword run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:paramtype run_status: str or ~flow.models.RunStatus
:keyword status_detail:
:paramtype status_detail: str
:keyword start_time:
:paramtype start_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
:keyword graph_id:
:paramtype graph_id: str
:keyword experiment_id:
:paramtype experiment_id: str
:keyword experiment_name:
:paramtype experiment_name: str
:keyword is_experiment_archived:
:paramtype is_experiment_archived: bool
:keyword submitted_by:
:paramtype submitted_by: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword step_tags: This is a dictionary.
:paramtype step_tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword aether_start_time:
:paramtype aether_start_time: ~datetime.datetime
:keyword aether_end_time:
:paramtype aether_end_time: ~datetime.datetime
:keyword run_history_start_time:
:paramtype run_history_start_time: ~datetime.datetime
:keyword run_history_end_time:
:paramtype run_history_end_time: ~datetime.datetime
:keyword unique_child_run_compute_targets:
:paramtype unique_child_run_compute_targets: list[str]
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(PipelineRun, self).__init__(**kwargs)
self.pipeline_id = kwargs.get('pipeline_id', None)
self.run_source = kwargs.get('run_source', None)
self.run_type = kwargs.get('run_type', None)
self.parameters = kwargs.get('parameters', None)
self.data_path_assignments = kwargs.get('data_path_assignments', None)
self.data_set_definition_value_assignment = kwargs.get('data_set_definition_value_assignment', None)
self.asset_output_settings_assignments = kwargs.get('asset_output_settings_assignments', None)
self.total_steps = kwargs.get('total_steps', None)
self.logs = kwargs.get('logs', None)
self.user_alias = kwargs.get('user_alias', None)
self.enforce_rerun = kwargs.get('enforce_rerun', None)
self.continue_run_on_failed_optional_input = kwargs.get('continue_run_on_failed_optional_input', None)
self.default_compute = kwargs.get('default_compute', None)
self.default_datastore = kwargs.get('default_datastore', None)
self.default_cloud_priority = kwargs.get('default_cloud_priority', None)
self.pipeline_timeout_seconds = kwargs.get('pipeline_timeout_seconds', None)
self.continue_run_on_step_failure = kwargs.get('continue_run_on_step_failure', None)
self.identity_config = kwargs.get('identity_config', None)
self.description = kwargs.get('description', None)
self.display_name = kwargs.get('display_name', None)
self.run_number = kwargs.get('run_number', None)
self.status_code = kwargs.get('status_code', None)
self.run_status = kwargs.get('run_status', None)
self.status_detail = kwargs.get('status_detail', None)
self.start_time = kwargs.get('start_time', None)
self.end_time = kwargs.get('end_time', None)
self.graph_id = kwargs.get('graph_id', None)
self.experiment_id = kwargs.get('experiment_id', None)
self.experiment_name = kwargs.get('experiment_name', None)
self.is_experiment_archived = kwargs.get('is_experiment_archived', None)
self.submitted_by = kwargs.get('submitted_by', None)
self.tags = kwargs.get('tags', None)
self.step_tags = kwargs.get('step_tags', None)
self.properties = kwargs.get('properties', None)
self.aether_start_time = kwargs.get('aether_start_time', None)
self.aether_end_time = kwargs.get('aether_end_time', None)
self.run_history_start_time = kwargs.get('run_history_start_time', None)
self.run_history_end_time = kwargs.get('run_history_end_time', None)
self.unique_child_run_compute_targets = kwargs.get('unique_child_run_compute_targets', None)
self.entity_status = kwargs.get('entity_status', None)
self.id = kwargs.get('id', None)
self.etag = kwargs.get('etag', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
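

# --- Usage sketch (editorial addition, not AutoRest output) -----------------
# Fields typed 'iso-8601' (start_time, end_time, created_date, ...) take
# ~datetime.datetime values and serialize to ISO-8601 strings. A minimal
# sketch with placeholder values:
def _example_pipeline_run():  # pragma: no cover
    import datetime

    run = PipelineRun(
        pipeline_id="pipeline-123",
        run_type="SDK",
        start_time=datetime.datetime(2024, 1, 1, 12, 0, 0),
    )
    wire = run.serialize()
    # wire['startTime'] == '2024-01-01T12:00:00.000Z'
    return wire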


class PipelineRunGraphDetail(msrest.serialization.Model):
"""PipelineRunGraphDetail.

    :ivar graph:
:vartype graph: ~flow.models.PipelineGraph
:ivar graph_nodes_status: This is a dictionary.
:vartype graph_nodes_status: dict[str, ~flow.models.GraphNodeStatusInfo]
"""

    _attribute_map = {
'graph': {'key': 'graph', 'type': 'PipelineGraph'},
'graph_nodes_status': {'key': 'graphNodesStatus', 'type': '{GraphNodeStatusInfo}'},
}

    def __init__(
self,
**kwargs
):
"""
:keyword graph:
:paramtype graph: ~flow.models.PipelineGraph
:keyword graph_nodes_status: This is a dictionary.
:paramtype graph_nodes_status: dict[str, ~flow.models.GraphNodeStatusInfo]
"""
super(PipelineRunGraphDetail, self).__init__(**kwargs)
self.graph = kwargs.get('graph', None)
self.graph_nodes_status = kwargs.get('graph_nodes_status', None)


class PipelineRunGraphStatus(msrest.serialization.Model):
"""PipelineRunGraphStatus.

    :ivar status:
:vartype status: ~flow.models.PipelineStatus
:ivar graph_nodes_status: This is a dictionary.
:vartype graph_nodes_status: dict[str, ~flow.models.GraphNodeStatusInfo]
:ivar experiment_id:
:vartype experiment_id: str
:ivar is_experiment_archived:
:vartype is_experiment_archived: bool
"""

    _attribute_map = {
'status': {'key': 'status', 'type': 'PipelineStatus'},
'graph_nodes_status': {'key': 'graphNodesStatus', 'type': '{GraphNodeStatusInfo}'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'is_experiment_archived': {'key': 'isExperimentArchived', 'type': 'bool'},
}

    def __init__(
self,
**kwargs
):
"""
:keyword status:
:paramtype status: ~flow.models.PipelineStatus
:keyword graph_nodes_status: This is a dictionary.
:paramtype graph_nodes_status: dict[str, ~flow.models.GraphNodeStatusInfo]
:keyword experiment_id:
:paramtype experiment_id: str
:keyword is_experiment_archived:
:paramtype is_experiment_archived: bool
"""
super(PipelineRunGraphStatus, self).__init__(**kwargs)
self.status = kwargs.get('status', None)
self.graph_nodes_status = kwargs.get('graph_nodes_status', None)
self.experiment_id = kwargs.get('experiment_id', None)
self.is_experiment_archived = kwargs.get('is_experiment_archived', None)


class PipelineRunProfile(msrest.serialization.Model):
"""PipelineRunProfile.

    :ivar run_id:
:vartype run_id: str
:ivar node_id:
:vartype node_id: str
:ivar run_url:
:vartype run_url: str
:ivar experiment_name:
:vartype experiment_name: str
:ivar experiment_id:
:vartype experiment_id: str
:ivar description:
:vartype description: str
:ivar status:
:vartype status: ~flow.models.PipelineRunStatus
:ivar create_time:
:vartype create_time: long
:ivar start_time:
:vartype start_time: long
:ivar end_time:
:vartype end_time: long
:ivar profiling_time:
:vartype profiling_time: long
:ivar step_runs_profile:
:vartype step_runs_profile: list[~flow.models.StepRunProfile]
:ivar sub_pipeline_run_profile:
:vartype sub_pipeline_run_profile: list[~flow.models.PipelineRunProfile]
"""

    _attribute_map = {
'run_id': {'key': 'runId', 'type': 'str'},
'node_id': {'key': 'nodeId', 'type': 'str'},
'run_url': {'key': 'runUrl', 'type': 'str'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'status': {'key': 'status', 'type': 'PipelineRunStatus'},
'create_time': {'key': 'createTime', 'type': 'long'},
'start_time': {'key': 'startTime', 'type': 'long'},
'end_time': {'key': 'endTime', 'type': 'long'},
'profiling_time': {'key': 'profilingTime', 'type': 'long'},
'step_runs_profile': {'key': 'stepRunsProfile', 'type': '[StepRunProfile]'},
'sub_pipeline_run_profile': {'key': 'subPipelineRunProfile', 'type': '[PipelineRunProfile]'},
}

    def __init__(
self,
**kwargs
):
"""
:keyword run_id:
:paramtype run_id: str
:keyword node_id:
:paramtype node_id: str
:keyword run_url:
:paramtype run_url: str
:keyword experiment_name:
:paramtype experiment_name: str
:keyword experiment_id:
:paramtype experiment_id: str
:keyword description:
:paramtype description: str
:keyword status:
:paramtype status: ~flow.models.PipelineRunStatus
:keyword create_time:
:paramtype create_time: long
:keyword start_time:
:paramtype start_time: long
:keyword end_time:
:paramtype end_time: long
:keyword profiling_time:
:paramtype profiling_time: long
:keyword step_runs_profile:
:paramtype step_runs_profile: list[~flow.models.StepRunProfile]
:keyword sub_pipeline_run_profile:
:paramtype sub_pipeline_run_profile: list[~flow.models.PipelineRunProfile]
"""
super(PipelineRunProfile, self).__init__(**kwargs)
self.run_id = kwargs.get('run_id', None)
self.node_id = kwargs.get('node_id', None)
self.run_url = kwargs.get('run_url', None)
self.experiment_name = kwargs.get('experiment_name', None)
self.experiment_id = kwargs.get('experiment_id', None)
self.description = kwargs.get('description', None)
self.status = kwargs.get('status', None)
self.create_time = kwargs.get('create_time', None)
self.start_time = kwargs.get('start_time', None)
self.end_time = kwargs.get('end_time', None)
self.profiling_time = kwargs.get('profiling_time', None)
self.step_runs_profile = kwargs.get('step_runs_profile', None)
self.sub_pipeline_run_profile = kwargs.get('sub_pipeline_run_profile', None)
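

# --- Usage sketch (editorial addition, not AutoRest output) -----------------
# Unlike PipelineRun above, the timing fields here are typed 'long', i.e.
# plain integers rather than ISO-8601 datetimes -- presumably epoch
# timestamps, though the unit (seconds vs. milliseconds) is not documented.
def _example_pipeline_run_profile():  # pragma: no cover
    profile = PipelineRunProfile(
        run_id="run-123",
        create_time=1700000000,  # placeholder epoch value; the unit is an assumption
        start_time=1700000100,
        end_time=1700003600,
    )
    return profile.serialize()  # integer fields pass through unchanged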


class PipelineRunStatus(msrest.serialization.Model):
"""PipelineRunStatus.

    :ivar status_code: Possible values include: "NotStarted", "Running", "Failed", "Finished",
"Canceled", "Queued", "CancelRequested".
:vartype status_code: str or ~flow.models.PipelineRunStatusCode
:ivar status_detail:
:vartype status_detail: str
:ivar creation_time:
:vartype creation_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
"""

    _attribute_map = {
'status_code': {'key': 'statusCode', 'type': 'str'},
'status_detail': {'key': 'statusDetail', 'type': 'str'},
'creation_time': {'key': 'creationTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
}

    def __init__(
self,
**kwargs
):
"""
:keyword status_code: Possible values include: "NotStarted", "Running", "Failed", "Finished",
"Canceled", "Queued", "CancelRequested".
:paramtype status_code: str or ~flow.models.PipelineRunStatusCode
:keyword status_detail:
:paramtype status_detail: str
:keyword creation_time:
:paramtype creation_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
"""
super(PipelineRunStatus, self).__init__(**kwargs)
self.status_code = kwargs.get('status_code', None)
self.status_detail = kwargs.get('status_detail', None)
self.creation_time = kwargs.get('creation_time', None)
self.end_time = kwargs.get('end_time', None)
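

# --- Usage sketch (editorial addition, not AutoRest output) -----------------
# Model.deserialize() (inherited from msrest) parses a wire-format payload:
# camelCase keys are mapped back through _attribute_map and ISO-8601 strings
# become ~datetime.datetime values. The payload below is a placeholder.
def _example_pipeline_run_status():  # pragma: no cover
    payload = {
        "statusCode": "Finished",
        "statusDetail": "Completed without errors",
        "creationTime": "2024-01-01T12:00:00.000Z",
        "endTime": "2024-01-01T12:30:00.000Z",
    }
    status = PipelineRunStatus.deserialize(payload)
    assert status.status_code == "Finished"
    assert status.creation_time.year == 2024  # parsed to datetime
    return status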


class PipelineRunStepDetails(msrest.serialization.Model):
"""PipelineRunStepDetails.

    :ivar run_id:
:vartype run_id: str
:ivar target:
:vartype target: str
:ivar status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:vartype status: str or ~flow.models.RunStatus
:ivar status_detail:
:vartype status_detail: str
:ivar parent_run_id:
:vartype parent_run_id: str
:ivar start_time:
:vartype start_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
:ivar is_reused:
:vartype is_reused: bool
:ivar logs: This is a dictionary.
:vartype logs: dict[str, str]
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, str]
:ivar snapshot_info:
:vartype snapshot_info: ~flow.models.SnapshotInfo
:ivar input_datasets:
:vartype input_datasets: list[~flow.models.DatasetLineage]
:ivar output_datasets:
:vartype output_datasets: list[~flow.models.OutputDatasetLineage]
"""

    _validation = {
'input_datasets': {'unique': True},
'output_datasets': {'unique': True},
}

    _attribute_map = {
'run_id': {'key': 'runId', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'status_detail': {'key': 'statusDetail', 'type': 'str'},
'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'is_reused': {'key': 'isReused', 'type': 'bool'},
'logs': {'key': 'logs', 'type': '{str}'},
'outputs': {'key': 'outputs', 'type': '{str}'},
'snapshot_info': {'key': 'snapshotInfo', 'type': 'SnapshotInfo'},
'input_datasets': {'key': 'inputDatasets', 'type': '[DatasetLineage]'},
'output_datasets': {'key': 'outputDatasets', 'type': '[OutputDatasetLineage]'},
}

    def __init__(
self,
**kwargs
):
"""
:keyword run_id:
:paramtype run_id: str
:keyword target:
:paramtype target: str
:keyword status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:paramtype status: str or ~flow.models.RunStatus
:keyword status_detail:
:paramtype status_detail: str
:keyword parent_run_id:
:paramtype parent_run_id: str
:keyword start_time:
:paramtype start_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
:keyword is_reused:
:paramtype is_reused: bool
:keyword logs: This is a dictionary.
:paramtype logs: dict[str, str]
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, str]
:keyword snapshot_info:
:paramtype snapshot_info: ~flow.models.SnapshotInfo
:keyword input_datasets:
:paramtype input_datasets: list[~flow.models.DatasetLineage]
:keyword output_datasets:
:paramtype output_datasets: list[~flow.models.OutputDatasetLineage]
"""
super(PipelineRunStepDetails, self).__init__(**kwargs)
self.run_id = kwargs.get('run_id', None)
self.target = kwargs.get('target', None)
self.status = kwargs.get('status', None)
self.status_detail = kwargs.get('status_detail', None)
self.parent_run_id = kwargs.get('parent_run_id', None)
self.start_time = kwargs.get('start_time', None)
self.end_time = kwargs.get('end_time', None)
self.is_reused = kwargs.get('is_reused', None)
self.logs = kwargs.get('logs', None)
self.outputs = kwargs.get('outputs', None)
self.snapshot_info = kwargs.get('snapshot_info', None)
self.input_datasets = kwargs.get('input_datasets', None)
self.output_datasets = kwargs.get('output_datasets', None)
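

# --- Usage sketch (editorial addition, not AutoRest output) -----------------
# The _validation table above marks input_datasets and output_datasets as
# 'unique'. msrest surfaces such constraints through Model.validate(), which
# walks the model and returns a list of ValidationError instances (empty when
# every constraint holds). Values are placeholders.
def _example_step_details_validation():  # pragma: no cover
    details = PipelineRunStepDetails(
        run_id="step-run-1",
        logs={"stdout": "https://example.invalid/logs/stdout.txt"},
    )
    return details.validate()  # [] when the model satisfies its constraints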


class PipelineRunSummary(msrest.serialization.Model):
"""PipelineRunSummary.

    :ivar description:
:vartype description: str
:ivar display_name:
:vartype display_name: str
:ivar run_number:
:vartype run_number: int
:ivar status_code: Possible values include: "NotStarted", "InDraft", "Preparing", "Running",
"Failed", "Finished", "Canceled", "Throttled", "Unknown".
:vartype status_code: str or ~flow.models.PipelineStatusCode
:ivar run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:vartype run_status: str or ~flow.models.RunStatus
:ivar status_detail:
:vartype status_detail: str
:ivar start_time:
:vartype start_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
:ivar graph_id:
:vartype graph_id: str
:ivar experiment_id:
:vartype experiment_id: str
:ivar experiment_name:
:vartype experiment_name: str
:ivar is_experiment_archived:
:vartype is_experiment_archived: bool
:ivar submitted_by:
:vartype submitted_by: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar step_tags: This is a dictionary.
:vartype step_tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar aether_start_time:
:vartype aether_start_time: ~datetime.datetime
:ivar aether_end_time:
:vartype aether_end_time: ~datetime.datetime
:ivar run_history_start_time:
:vartype run_history_start_time: ~datetime.datetime
:ivar run_history_end_time:
:vartype run_history_end_time: ~datetime.datetime
:ivar unique_child_run_compute_targets:
:vartype unique_child_run_compute_targets: list[str]
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""

    _validation = {
'unique_child_run_compute_targets': {'unique': True},
}

    _attribute_map = {
'description': {'key': 'description', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'run_number': {'key': 'runNumber', 'type': 'int'},
'status_code': {'key': 'statusCode', 'type': 'str'},
'run_status': {'key': 'runStatus', 'type': 'str'},
'status_detail': {'key': 'statusDetail', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'graph_id': {'key': 'graphId', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'is_experiment_archived': {'key': 'isExperimentArchived', 'type': 'bool'},
'submitted_by': {'key': 'submittedBy', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'step_tags': {'key': 'stepTags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'aether_start_time': {'key': 'aetherStartTime', 'type': 'iso-8601'},
'aether_end_time': {'key': 'aetherEndTime', 'type': 'iso-8601'},
'run_history_start_time': {'key': 'runHistoryStartTime', 'type': 'iso-8601'},
'run_history_end_time': {'key': 'runHistoryEndTime', 'type': 'iso-8601'},
'unique_child_run_compute_targets': {'key': 'uniqueChildRunComputeTargets', 'type': '[str]'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}

    def __init__(
self,
**kwargs
):
"""
:keyword description:
:paramtype description: str
:keyword display_name:
:paramtype display_name: str
:keyword run_number:
:paramtype run_number: int
:keyword status_code: Possible values include: "NotStarted", "InDraft", "Preparing", "Running",
"Failed", "Finished", "Canceled", "Throttled", "Unknown".
:paramtype status_code: str or ~flow.models.PipelineStatusCode
:keyword run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:paramtype run_status: str or ~flow.models.RunStatus
:keyword status_detail:
:paramtype status_detail: str
:keyword start_time:
:paramtype start_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
:keyword graph_id:
:paramtype graph_id: str
:keyword experiment_id:
:paramtype experiment_id: str
:keyword experiment_name:
:paramtype experiment_name: str
:keyword is_experiment_archived:
:paramtype is_experiment_archived: bool
:keyword submitted_by:
:paramtype submitted_by: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword step_tags: This is a dictionary.
:paramtype step_tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword aether_start_time:
:paramtype aether_start_time: ~datetime.datetime
:keyword aether_end_time:
:paramtype aether_end_time: ~datetime.datetime
:keyword run_history_start_time:
:paramtype run_history_start_time: ~datetime.datetime
:keyword run_history_end_time:
:paramtype run_history_end_time: ~datetime.datetime
:keyword unique_child_run_compute_targets:
:paramtype unique_child_run_compute_targets: list[str]
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(PipelineRunSummary, self).__init__(**kwargs)
self.description = kwargs.get('description', None)
self.display_name = kwargs.get('display_name', None)
self.run_number = kwargs.get('run_number', None)
self.status_code = kwargs.get('status_code', None)
self.run_status = kwargs.get('run_status', None)
self.status_detail = kwargs.get('status_detail', None)
self.start_time = kwargs.get('start_time', None)
self.end_time = kwargs.get('end_time', None)
self.graph_id = kwargs.get('graph_id', None)
self.experiment_id = kwargs.get('experiment_id', None)
self.experiment_name = kwargs.get('experiment_name', None)
self.is_experiment_archived = kwargs.get('is_experiment_archived', None)
self.submitted_by = kwargs.get('submitted_by', None)
self.tags = kwargs.get('tags', None)
self.step_tags = kwargs.get('step_tags', None)
self.properties = kwargs.get('properties', None)
self.aether_start_time = kwargs.get('aether_start_time', None)
self.aether_end_time = kwargs.get('aether_end_time', None)
self.run_history_start_time = kwargs.get('run_history_start_time', None)
self.run_history_end_time = kwargs.get('run_history_end_time', None)
self.unique_child_run_compute_targets = kwargs.get('unique_child_run_compute_targets', None)
self.entity_status = kwargs.get('entity_status', None)
self.id = kwargs.get('id', None)
self.etag = kwargs.get('etag', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
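
# Illustrative sketch (not part of the generated code): PipelineRunSummary is a
# plain kwargs-initialized model, so unset fields stay None. Its `_validation`
# marks `unique_child_run_compute_targets` as {'unique': True}, which the msrest
# base class's `validate()` should flag when the list contains duplicates.
#
#   summary = PipelineRunSummary(
#       display_name="nightly-train",
#       run_number=42,
#       unique_child_run_compute_targets=["cpu-cluster", "cpu-cluster"],
#   )
#   errors = summary.validate()  # expected: one error for the duplicated entry

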
class PipelineStatus(msrest.serialization.Model):
"""PipelineStatus.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar status_code: Possible values include: "NotStarted", "InDraft", "Preparing", "Running",
"Failed", "Finished", "Canceled", "Throttled", "Unknown".
:vartype status_code: str or ~flow.models.PipelineStatusCode
:ivar run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:vartype run_status: str or ~flow.models.RunStatus
:ivar status_detail:
:vartype status_detail: str
:ivar start_time:
:vartype start_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
:ivar is_terminal_state:
:vartype is_terminal_state: bool
"""
_validation = {
'is_terminal_state': {'readonly': True},
}
_attribute_map = {
'status_code': {'key': 'statusCode', 'type': 'str'},
'run_status': {'key': 'runStatus', 'type': 'str'},
'status_detail': {'key': 'statusDetail', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'is_terminal_state': {'key': 'isTerminalState', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword status_code: Possible values include: "NotStarted", "InDraft", "Preparing", "Running",
"Failed", "Finished", "Canceled", "Throttled", "Unknown".
:paramtype status_code: str or ~flow.models.PipelineStatusCode
:keyword run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:paramtype run_status: str or ~flow.models.RunStatus
:keyword status_detail:
:paramtype status_detail: str
:keyword start_time:
:paramtype start_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
"""
super(PipelineStatus, self).__init__(**kwargs)
self.status_code = kwargs.get('status_code', None)
self.run_status = kwargs.get('run_status', None)
self.status_detail = kwargs.get('status_detail', None)
self.start_time = kwargs.get('start_time', None)
self.end_time = kwargs.get('end_time', None)
self.is_terminal_state = None
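
# Illustrative sketch (assumed msrest behavior): `is_terminal_state` is declared
# read-only in `_validation`, so __init__ pins it to None and a value only shows
# up on models deserialized from a service response.
#
#   status = PipelineStatus(status_code="Running", run_status="Running")
#   status.is_terminal_state                  # None on client-built instances
#   parsed = PipelineStatus.deserialize(
#       {"statusCode": "Finished", "isTerminalState": True}
#   )
#   parsed.is_terminal_state                  # True, taken from the payload

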
class PipelineStepRun(msrest.serialization.Model):
"""PipelineStepRun.
:ivar step_name:
:vartype step_name: str
:ivar run_number:
:vartype run_number: int
:ivar run_id:
:vartype run_id: str
:ivar start_time:
:vartype start_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
:ivar run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:vartype run_status: str or ~flow.models.RunStatus
:ivar compute_target:
:vartype compute_target: str
:ivar compute_type:
:vartype compute_type: str
:ivar run_type:
:vartype run_type: str
:ivar step_type:
:vartype step_type: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar is_reused:
:vartype is_reused: bool
:ivar display_name:
:vartype display_name: str
"""
_attribute_map = {
'step_name': {'key': 'stepName', 'type': 'str'},
'run_number': {'key': 'runNumber', 'type': 'int'},
'run_id': {'key': 'runId', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'run_status': {'key': 'runStatus', 'type': 'str'},
'compute_target': {'key': 'computeTarget', 'type': 'str'},
'compute_type': {'key': 'computeType', 'type': 'str'},
'run_type': {'key': 'runType', 'type': 'str'},
'step_type': {'key': 'stepType', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'is_reused': {'key': 'isReused', 'type': 'bool'},
'display_name': {'key': 'displayName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword step_name:
:paramtype step_name: str
:keyword run_number:
:paramtype run_number: int
:keyword run_id:
:paramtype run_id: str
:keyword start_time:
:paramtype start_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
:keyword run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:paramtype run_status: str or ~flow.models.RunStatus
:keyword compute_target:
:paramtype compute_target: str
:keyword compute_type:
:paramtype compute_type: str
:keyword run_type:
:paramtype run_type: str
:keyword step_type:
:paramtype step_type: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword is_reused:
:paramtype is_reused: bool
:keyword display_name:
:paramtype display_name: str
"""
super(PipelineStepRun, self).__init__(**kwargs)
self.step_name = kwargs.get('step_name', None)
self.run_number = kwargs.get('run_number', None)
self.run_id = kwargs.get('run_id', None)
self.start_time = kwargs.get('start_time', None)
self.end_time = kwargs.get('end_time', None)
self.run_status = kwargs.get('run_status', None)
self.compute_target = kwargs.get('compute_target', None)
self.compute_type = kwargs.get('compute_type', None)
self.run_type = kwargs.get('run_type', None)
self.step_type = kwargs.get('step_type', None)
self.tags = kwargs.get('tags', None)
self.is_reused = kwargs.get('is_reused', None)
self.display_name = kwargs.get('display_name', None)


class PipelineStepRunOutputs(msrest.serialization.Model):
"""PipelineStepRunOutputs.
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, str]
:ivar port_outputs: This is a dictionary.
:vartype port_outputs: dict[str, ~flow.models.PortOutputInfo]
"""
_attribute_map = {
'outputs': {'key': 'outputs', 'type': '{str}'},
'port_outputs': {'key': 'portOutputs', 'type': '{PortOutputInfo}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, str]
:keyword port_outputs: This is a dictionary.
:paramtype port_outputs: dict[str, ~flow.models.PortOutputInfo]
"""
super(PipelineStepRunOutputs, self).__init__(**kwargs)
self.outputs = kwargs.get('outputs', None)
self.port_outputs = kwargs.get('port_outputs', None)
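
# Illustrative sketch (hypothetical values): `port_outputs` nests PortOutputInfo
# models (defined later in this module) under arbitrary port names, while
# `outputs` is a flat string-to-string dictionary.
#
#   step_outputs = PipelineStepRunOutputs(
#       outputs={"metrics": "azureml://datastores/workspaceblobstore/metrics"},
#       port_outputs={"scored_data": PortOutputInfo(relative_path="out/scored")},
#   )

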
class PipelineSubDraft(msrest.serialization.Model):
"""PipelineSubDraft.
:ivar parent_graph_draft_id:
:vartype parent_graph_draft_id: str
:ivar parent_node_id:
:vartype parent_node_id: str
:ivar graph_detail:
:vartype graph_detail: ~flow.models.PipelineRunGraphDetail
:ivar module_dto:
:vartype module_dto: ~flow.models.ModuleDto
:ivar name:
:vartype name: str
:ivar last_edited_by:
:vartype last_edited_by: str
:ivar created_by:
:vartype created_by: str
:ivar description:
:vartype description: str
:ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline",
"BatchInferencePipeline", "Unknown".
:vartype pipeline_type: str or ~flow.models.PipelineType
:ivar pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:vartype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'parent_graph_draft_id': {'key': 'parentGraphDraftId', 'type': 'str'},
'parent_node_id': {'key': 'parentNodeId', 'type': 'str'},
'graph_detail': {'key': 'graphDetail', 'type': 'PipelineRunGraphDetail'},
'module_dto': {'key': 'moduleDto', 'type': 'ModuleDto'},
'name': {'key': 'name', 'type': 'str'},
'last_edited_by': {'key': 'lastEditedBy', 'type': 'str'},
'created_by': {'key': 'createdBy', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
'pipeline_draft_mode': {'key': 'pipelineDraftMode', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword parent_graph_draft_id:
:paramtype parent_graph_draft_id: str
:keyword parent_node_id:
:paramtype parent_node_id: str
:keyword graph_detail:
:paramtype graph_detail: ~flow.models.PipelineRunGraphDetail
:keyword module_dto:
:paramtype module_dto: ~flow.models.ModuleDto
:keyword name:
:paramtype name: str
:keyword last_edited_by:
:paramtype last_edited_by: str
:keyword created_by:
:paramtype created_by: str
:keyword description:
:paramtype description: str
:keyword pipeline_type: Possible values include: "TrainingPipeline",
"RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
:paramtype pipeline_type: str or ~flow.models.PipelineType
:keyword pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:paramtype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(PipelineSubDraft, self).__init__(**kwargs)
self.parent_graph_draft_id = kwargs.get('parent_graph_draft_id', None)
self.parent_node_id = kwargs.get('parent_node_id', None)
self.graph_detail = kwargs.get('graph_detail', None)
self.module_dto = kwargs.get('module_dto', None)
self.name = kwargs.get('name', None)
self.last_edited_by = kwargs.get('last_edited_by', None)
self.created_by = kwargs.get('created_by', None)
self.description = kwargs.get('description', None)
self.pipeline_type = kwargs.get('pipeline_type', None)
self.pipeline_draft_mode = kwargs.get('pipeline_draft_mode', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
self.entity_status = kwargs.get('entity_status', None)
self.id = kwargs.get('id', None)
self.etag = kwargs.get('etag', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)


class PolicyValidationResponse(msrest.serialization.Model):
"""PolicyValidationResponse.
:ivar error_response: The error response.
:vartype error_response: ~flow.models.ErrorResponse
:ivar next_action_interval_in_seconds:
:vartype next_action_interval_in_seconds: int
:ivar action_type: Possible values include: "SendValidationRequest", "GetValidationStatus",
"SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent", "SubmitFlowRun".
:vartype action_type: str or ~flow.models.ActionType
"""
_attribute_map = {
'error_response': {'key': 'errorResponse', 'type': 'ErrorResponse'},
'next_action_interval_in_seconds': {'key': 'nextActionIntervalInSeconds', 'type': 'int'},
'action_type': {'key': 'actionType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword error_response: The error response.
:paramtype error_response: ~flow.models.ErrorResponse
:keyword next_action_interval_in_seconds:
:paramtype next_action_interval_in_seconds: int
:keyword action_type: Possible values include: "SendValidationRequest", "GetValidationStatus",
"SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent", "SubmitFlowRun".
:paramtype action_type: str or ~flow.models.ActionType
"""
super(PolicyValidationResponse, self).__init__(**kwargs)
self.error_response = kwargs.get('error_response', None)
self.next_action_interval_in_seconds = kwargs.get('next_action_interval_in_seconds', None)
self.action_type = kwargs.get('action_type', None)
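
# Illustrative sketch (assumed msrest behavior): enum-typed fields such as
# `action_type` accept either the wire string or the ~flow.models.ActionType
# member; both serialize to the same string.
#
#   resp = PolicyValidationResponse(
#       next_action_interval_in_seconds=30,
#       action_type="GetValidationStatus",
#   )

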
class PortInfo(msrest.serialization.Model):
"""PortInfo.
:ivar node_id:
:vartype node_id: str
:ivar port_name:
:vartype port_name: str
:ivar graph_port_name:
:vartype graph_port_name: str
:ivar is_parameter:
:vartype is_parameter: bool
:ivar web_service_port:
:vartype web_service_port: str
"""
_attribute_map = {
'node_id': {'key': 'nodeId', 'type': 'str'},
'port_name': {'key': 'portName', 'type': 'str'},
'graph_port_name': {'key': 'graphPortName', 'type': 'str'},
'is_parameter': {'key': 'isParameter', 'type': 'bool'},
'web_service_port': {'key': 'webServicePort', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword node_id:
:paramtype node_id: str
:keyword port_name:
:paramtype port_name: str
:keyword graph_port_name:
:paramtype graph_port_name: str
:keyword is_parameter:
:paramtype is_parameter: bool
:keyword web_service_port:
:paramtype web_service_port: str
"""
super(PortInfo, self).__init__(**kwargs)
self.node_id = kwargs.get('node_id', None)
self.port_name = kwargs.get('port_name', None)
self.graph_port_name = kwargs.get('graph_port_name', None)
self.is_parameter = kwargs.get('is_parameter', None)
self.web_service_port = kwargs.get('web_service_port', None)


class PortOutputInfo(msrest.serialization.Model):
"""PortOutputInfo.
:ivar container_uri:
:vartype container_uri: str
:ivar relative_path:
:vartype relative_path: str
:ivar preview_params:
:vartype preview_params: str
:ivar model_output_path:
:vartype model_output_path: str
:ivar data_store_name:
:vartype data_store_name: str
:ivar data_reference_type: Possible values include: "None", "AzureBlob", "AzureDataLake",
"AzureFiles", "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS",
"AzureMySqlDatabase", "Custom", "Hdfs".
:vartype data_reference_type: str or ~flow.models.DataReferenceType
:ivar is_file:
:vartype is_file: bool
:ivar supported_actions:
:vartype supported_actions: list[str or ~flow.models.PortAction]
"""
_attribute_map = {
'container_uri': {'key': 'containerUri', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'preview_params': {'key': 'previewParams', 'type': 'str'},
'model_output_path': {'key': 'modelOutputPath', 'type': 'str'},
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'data_reference_type': {'key': 'dataReferenceType', 'type': 'str'},
'is_file': {'key': 'isFile', 'type': 'bool'},
'supported_actions': {'key': 'supportedActions', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword container_uri:
:paramtype container_uri: str
:keyword relative_path:
:paramtype relative_path: str
:keyword preview_params:
:paramtype preview_params: str
:keyword model_output_path:
:paramtype model_output_path: str
:keyword data_store_name:
:paramtype data_store_name: str
:keyword data_reference_type: Possible values include: "None", "AzureBlob", "AzureDataLake",
"AzureFiles", "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS",
"AzureMySqlDatabase", "Custom", "Hdfs".
:paramtype data_reference_type: str or ~flow.models.DataReferenceType
:keyword is_file:
:paramtype is_file: bool
:keyword supported_actions:
:paramtype supported_actions: list[str or ~flow.models.PortAction]
"""
super(PortOutputInfo, self).__init__(**kwargs)
self.container_uri = kwargs.get('container_uri', None)
self.relative_path = kwargs.get('relative_path', None)
self.preview_params = kwargs.get('preview_params', None)
self.model_output_path = kwargs.get('model_output_path', None)
self.data_store_name = kwargs.get('data_store_name', None)
self.data_reference_type = kwargs.get('data_reference_type', None)
self.is_file = kwargs.get('is_file', None)
self.supported_actions = kwargs.get('supported_actions', None)
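
# Illustrative sketch (assumed msrest behavior): `serialize()` renames the
# snake_case attributes to the camelCase wire keys in `_attribute_map`, e.g.
# data_store_name -> dataStoreName.
#
#   info = PortOutputInfo(
#       container_uri="https://account.blob.core.windows.net/outputs",
#       relative_path="run1/output.parquet",
#       data_store_name="workspaceblobstore",
#       is_file=True,
#   )
#   info.serialize()
#   # {'containerUri': '...', 'relativePath': 'run1/output.parquet',
#   #  'dataStoreName': 'workspaceblobstore', 'isFile': True}

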
class PriorityConfig(msrest.serialization.Model):
"""PriorityConfig.
:ivar job_priority:
:vartype job_priority: int
:ivar is_preemptible:
:vartype is_preemptible: bool
:ivar node_count_set:
:vartype node_count_set: list[int]
:ivar scale_interval:
:vartype scale_interval: int
"""
_attribute_map = {
'job_priority': {'key': 'jobPriority', 'type': 'int'},
'is_preemptible': {'key': 'isPreemptible', 'type': 'bool'},
'node_count_set': {'key': 'nodeCountSet', 'type': '[int]'},
'scale_interval': {'key': 'scaleInterval', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword job_priority:
:paramtype job_priority: int
:keyword is_preemptible:
:paramtype is_preemptible: bool
:keyword node_count_set:
:paramtype node_count_set: list[int]
:keyword scale_interval:
:paramtype scale_interval: int
"""
super(PriorityConfig, self).__init__(**kwargs)
self.job_priority = kwargs.get('job_priority', None)
self.is_preemptible = kwargs.get('is_preemptible', None)
self.node_count_set = kwargs.get('node_count_set', None)
self.scale_interval = kwargs.get('scale_interval', None)


class PriorityConfiguration(msrest.serialization.Model):
"""PriorityConfiguration.
:ivar cloud_priority:
:vartype cloud_priority: int
:ivar string_type_priority:
:vartype string_type_priority: str
"""
_attribute_map = {
'cloud_priority': {'key': 'cloudPriority', 'type': 'int'},
'string_type_priority': {'key': 'stringTypePriority', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword cloud_priority:
:paramtype cloud_priority: int
:keyword string_type_priority:
:paramtype string_type_priority: str
"""
super(PriorityConfiguration, self).__init__(**kwargs)
self.cloud_priority = kwargs.get('cloud_priority', None)
self.string_type_priority = kwargs.get('string_type_priority', None)


class PromoteDataSetRequest(msrest.serialization.Model):
"""PromoteDataSetRequest.
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar module_node_id:
:vartype module_node_id: str
:ivar step_run_id:
:vartype step_run_id: str
:ivar output_port_name:
:vartype output_port_name: str
:ivar model_output_path:
:vartype model_output_path: str
:ivar data_type_id:
:vartype data_type_id: str
:ivar dataset_type:
:vartype dataset_type: str
:ivar data_store_name:
:vartype data_store_name: str
:ivar output_relative_path:
:vartype output_relative_path: str
:ivar pipeline_run_id:
:vartype pipeline_run_id: str
:ivar root_pipeline_run_id:
:vartype root_pipeline_run_id: str
:ivar experiment_name:
:vartype experiment_name: str
:ivar experiment_id:
:vartype experiment_id: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'module_node_id': {'key': 'moduleNodeId', 'type': 'str'},
'step_run_id': {'key': 'stepRunId', 'type': 'str'},
'output_port_name': {'key': 'outputPortName', 'type': 'str'},
'model_output_path': {'key': 'modelOutputPath', 'type': 'str'},
'data_type_id': {'key': 'dataTypeId', 'type': 'str'},
'dataset_type': {'key': 'datasetType', 'type': 'str'},
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'output_relative_path': {'key': 'outputRelativePath', 'type': 'str'},
'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'},
'root_pipeline_run_id': {'key': 'rootPipelineRunId', 'type': 'str'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword module_node_id:
:paramtype module_node_id: str
:keyword step_run_id:
:paramtype step_run_id: str
:keyword output_port_name:
:paramtype output_port_name: str
:keyword model_output_path:
:paramtype model_output_path: str
:keyword data_type_id:
:paramtype data_type_id: str
:keyword dataset_type:
:paramtype dataset_type: str
:keyword data_store_name:
:paramtype data_store_name: str
:keyword output_relative_path:
:paramtype output_relative_path: str
:keyword pipeline_run_id:
:paramtype pipeline_run_id: str
:keyword root_pipeline_run_id:
:paramtype root_pipeline_run_id: str
:keyword experiment_name:
:paramtype experiment_name: str
:keyword experiment_id:
:paramtype experiment_id: str
"""
super(PromoteDataSetRequest, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.module_node_id = kwargs.get('module_node_id', None)
self.step_run_id = kwargs.get('step_run_id', None)
self.output_port_name = kwargs.get('output_port_name', None)
self.model_output_path = kwargs.get('model_output_path', None)
self.data_type_id = kwargs.get('data_type_id', None)
self.dataset_type = kwargs.get('dataset_type', None)
self.data_store_name = kwargs.get('data_store_name', None)
self.output_relative_path = kwargs.get('output_relative_path', None)
self.pipeline_run_id = kwargs.get('pipeline_run_id', None)
self.root_pipeline_run_id = kwargs.get('root_pipeline_run_id', None)
self.experiment_name = kwargs.get('experiment_name', None)
self.experiment_id = kwargs.get('experiment_id', None)
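
# Illustrative sketch (all identifiers below are placeholders): a promote
# request ties one output port of a finished step run to a dataset name and
# storage location.
#
#   req = PromoteDataSetRequest(
#       name="scored-data",
#       step_run_id="00000000-0000-0000-0000-000000000000",
#       output_port_name="scored_data",
#       data_store_name="workspaceblobstore",
#       output_relative_path="runs/scored",
#   )

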
class ProviderEntity(msrest.serialization.Model):
"""ProviderEntity.
:ivar provider:
:vartype provider: str
:ivar module:
:vartype module: str
:ivar connection_type:
:vartype connection_type: list[str or ~flow.models.ConnectionType]
:ivar apis:
:vartype apis: list[~flow.models.ApiAndParameters]
"""
_attribute_map = {
'provider': {'key': 'provider', 'type': 'str'},
'module': {'key': 'module', 'type': 'str'},
'connection_type': {'key': 'connection_type', 'type': '[str]'},
'apis': {'key': 'apis', 'type': '[ApiAndParameters]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword provider:
:paramtype provider: str
:keyword module:
:paramtype module: str
:keyword connection_type:
:paramtype connection_type: list[str or ~flow.models.ConnectionType]
:keyword apis:
:paramtype apis: list[~flow.models.ApiAndParameters]
"""
super(ProviderEntity, self).__init__(**kwargs)
self.provider = kwargs.get('provider', None)
self.module = kwargs.get('module', None)
self.connection_type = kwargs.get('connection_type', None)
self.apis = kwargs.get('apis', None)


class PublishedPipeline(msrest.serialization.Model):
"""PublishedPipeline.
:ivar total_run_steps:
:vartype total_run_steps: int
:ivar total_runs:
:vartype total_runs: int
:ivar parameters: This is a dictionary.
:vartype parameters: dict[str, str]
:ivar data_set_definition_value_assignment: This is a dictionary.
:vartype data_set_definition_value_assignment: dict[str, ~flow.models.DataSetDefinitionValue]
:ivar rest_endpoint:
:vartype rest_endpoint: str
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar graph_id:
:vartype graph_id: str
:ivar published_date:
:vartype published_date: ~datetime.datetime
:ivar last_run_time:
:vartype last_run_time: ~datetime.datetime
:ivar last_run_status: Possible values include: "NotStarted", "Running", "Failed", "Finished",
"Canceled", "Queued", "CancelRequested".
:vartype last_run_status: str or ~flow.models.PipelineRunStatusCode
:ivar published_by:
:vartype published_by: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar version:
:vartype version: str
:ivar is_default:
:vartype is_default: bool
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'total_run_steps': {'key': 'totalRunSteps', 'type': 'int'},
'total_runs': {'key': 'totalRuns', 'type': 'int'},
'parameters': {'key': 'parameters', 'type': '{str}'},
'data_set_definition_value_assignment': {'key': 'dataSetDefinitionValueAssignment', 'type': '{DataSetDefinitionValue}'},
'rest_endpoint': {'key': 'restEndpoint', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'graph_id': {'key': 'graphId', 'type': 'str'},
'published_date': {'key': 'publishedDate', 'type': 'iso-8601'},
'last_run_time': {'key': 'lastRunTime', 'type': 'iso-8601'},
'last_run_status': {'key': 'lastRunStatus', 'type': 'str'},
'published_by': {'key': 'publishedBy', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'version': {'key': 'version', 'type': 'str'},
'is_default': {'key': 'isDefault', 'type': 'bool'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword total_run_steps:
:paramtype total_run_steps: int
:keyword total_runs:
:paramtype total_runs: int
:keyword parameters: This is a dictionary.
:paramtype parameters: dict[str, str]
:keyword data_set_definition_value_assignment: This is a dictionary.
:paramtype data_set_definition_value_assignment: dict[str, ~flow.models.DataSetDefinitionValue]
:keyword rest_endpoint:
:paramtype rest_endpoint: str
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword graph_id:
:paramtype graph_id: str
:keyword published_date:
:paramtype published_date: ~datetime.datetime
:keyword last_run_time:
:paramtype last_run_time: ~datetime.datetime
:keyword last_run_status: Possible values include: "NotStarted", "Running", "Failed",
"Finished", "Canceled", "Queued", "CancelRequested".
:paramtype last_run_status: str or ~flow.models.PipelineRunStatusCode
:keyword published_by:
:paramtype published_by: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword version:
:paramtype version: str
:keyword is_default:
:paramtype is_default: bool
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(PublishedPipeline, self).__init__(**kwargs)
self.total_run_steps = kwargs.get('total_run_steps', None)
self.total_runs = kwargs.get('total_runs', None)
self.parameters = kwargs.get('parameters', None)
self.data_set_definition_value_assignment = kwargs.get('data_set_definition_value_assignment', None)
self.rest_endpoint = kwargs.get('rest_endpoint', None)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.graph_id = kwargs.get('graph_id', None)
self.published_date = kwargs.get('published_date', None)
self.last_run_time = kwargs.get('last_run_time', None)
self.last_run_status = kwargs.get('last_run_status', None)
self.published_by = kwargs.get('published_by', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
self.version = kwargs.get('version', None)
self.is_default = kwargs.get('is_default', None)
self.entity_status = kwargs.get('entity_status', None)
self.id = kwargs.get('id', None)
self.etag = kwargs.get('etag', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
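
# Illustrative sketch (assumed msrest behavior; the payload values are made up):
# service responses carry camelCase keys and ISO-8601 timestamps, and
# `deserialize` rebuilds the typed model, parsing publishedDate into a datetime.
#
#   payload = {
#       "name": "train-pipeline",
#       "restEndpoint": "https://example.invalid/pipelines/run",
#       "publishedDate": "2024-01-01T00:00:00Z",
#       "entityStatus": "Active",
#   }
#   pipeline = PublishedPipeline.deserialize(payload)
#   pipeline.published_date  # datetime.datetime(2024, 1, 1, ...)

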
class PublishedPipelineSummary(msrest.serialization.Model):
"""PublishedPipelineSummary.
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar graph_id:
:vartype graph_id: str
:ivar published_date:
:vartype published_date: ~datetime.datetime
:ivar last_run_time:
:vartype last_run_time: ~datetime.datetime
:ivar last_run_status: Possible values include: "NotStarted", "Running", "Failed", "Finished",
"Canceled", "Queued", "CancelRequested".
:vartype last_run_status: str or ~flow.models.PipelineRunStatusCode
:ivar published_by:
:vartype published_by: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar version:
:vartype version: str
:ivar is_default:
:vartype is_default: bool
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'graph_id': {'key': 'graphId', 'type': 'str'},
'published_date': {'key': 'publishedDate', 'type': 'iso-8601'},
'last_run_time': {'key': 'lastRunTime', 'type': 'iso-8601'},
'last_run_status': {'key': 'lastRunStatus', 'type': 'str'},
'published_by': {'key': 'publishedBy', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'version': {'key': 'version', 'type': 'str'},
'is_default': {'key': 'isDefault', 'type': 'bool'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword graph_id:
:paramtype graph_id: str
:keyword published_date:
:paramtype published_date: ~datetime.datetime
:keyword last_run_time:
:paramtype last_run_time: ~datetime.datetime
:keyword last_run_status: Possible values include: "NotStarted", "Running", "Failed",
"Finished", "Canceled", "Queued", "CancelRequested".
:paramtype last_run_status: str or ~flow.models.PipelineRunStatusCode
:keyword published_by:
:paramtype published_by: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword version:
:paramtype version: str
:keyword is_default:
:paramtype is_default: bool
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(PublishedPipelineSummary, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.graph_id = kwargs.get('graph_id', None)
self.published_date = kwargs.get('published_date', None)
self.last_run_time = kwargs.get('last_run_time', None)
self.last_run_status = kwargs.get('last_run_status', None)
self.published_by = kwargs.get('published_by', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
self.version = kwargs.get('version', None)
self.is_default = kwargs.get('is_default', None)
self.entity_status = kwargs.get('entity_status', None)
self.id = kwargs.get('id', None)
self.etag = kwargs.get('etag', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)


class PythonInterfaceMapping(msrest.serialization.Model):
"""PythonInterfaceMapping.
:ivar name:
:vartype name: str
:ivar name_in_yaml:
:vartype name_in_yaml: str
:ivar argument_name:
:vartype argument_name: str
:ivar command_line_option:
:vartype command_line_option: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'name_in_yaml': {'key': 'nameInYaml', 'type': 'str'},
'argument_name': {'key': 'argumentName', 'type': 'str'},
'command_line_option': {'key': 'commandLineOption', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword name_in_yaml:
:paramtype name_in_yaml: str
:keyword argument_name:
:paramtype argument_name: str
:keyword command_line_option:
:paramtype command_line_option: str
"""
super(PythonInterfaceMapping, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.name_in_yaml = kwargs.get('name_in_yaml', None)
self.argument_name = kwargs.get('argument_name', None)
self.command_line_option = kwargs.get('command_line_option', None)


class PythonPyPiOrRCranLibraryDto(msrest.serialization.Model):
"""PythonPyPiOrRCranLibraryDto.
:ivar package:
:vartype package: str
:ivar repo:
:vartype repo: str
"""
_attribute_map = {
'package': {'key': 'package', 'type': 'str'},
'repo': {'key': 'repo', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword package:
:paramtype package: str
:keyword repo:
:paramtype repo: str
"""
super(PythonPyPiOrRCranLibraryDto, self).__init__(**kwargs)
self.package = kwargs.get('package', None)
self.repo = kwargs.get('repo', None)


class PythonSection(msrest.serialization.Model):
"""PythonSection.
:ivar interpreter_path:
:vartype interpreter_path: str
:ivar user_managed_dependencies:
:vartype user_managed_dependencies: bool
:ivar conda_dependencies: Anything.
:vartype conda_dependencies: any
:ivar base_conda_environment:
:vartype base_conda_environment: str
"""
_attribute_map = {
'interpreter_path': {'key': 'interpreterPath', 'type': 'str'},
'user_managed_dependencies': {'key': 'userManagedDependencies', 'type': 'bool'},
'conda_dependencies': {'key': 'condaDependencies', 'type': 'object'},
'base_conda_environment': {'key': 'baseCondaEnvironment', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword interpreter_path:
:paramtype interpreter_path: str
:keyword user_managed_dependencies:
:paramtype user_managed_dependencies: bool
:keyword conda_dependencies: Anything.
:paramtype conda_dependencies: any
:keyword base_conda_environment:
:paramtype base_conda_environment: str
"""
super(PythonSection, self).__init__(**kwargs)
self.interpreter_path = kwargs.get('interpreter_path', None)
self.user_managed_dependencies = kwargs.get('user_managed_dependencies', None)
self.conda_dependencies = kwargs.get('conda_dependencies', None)
self.base_conda_environment = kwargs.get('base_conda_environment', None)
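
# Illustrative sketch (hypothetical values): `conda_dependencies` is typed
# 'object', so any JSON-serializable structure passes through unchanged, e.g.
# a parsed conda environment specification.
#
#   py = PythonSection(
#       interpreter_path="python",
#       user_managed_dependencies=False,
#       conda_dependencies={
#           "name": "example-env",
#           "dependencies": ["python=3.9", {"pip": ["msrest"]}],
#       },
#   )

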
class PyTorchConfiguration(msrest.serialization.Model):
"""PyTorchConfiguration.
:ivar communication_backend:
:vartype communication_backend: str
:ivar process_count:
:vartype process_count: int
"""
_attribute_map = {
'communication_backend': {'key': 'communicationBackend', 'type': 'str'},
'process_count': {'key': 'processCount', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword communication_backend:
:paramtype communication_backend: str
:keyword process_count:
:paramtype process_count: int
"""
super(PyTorchConfiguration, self).__init__(**kwargs)
self.communication_backend = kwargs.get('communication_backend', None)
self.process_count = kwargs.get('process_count', None)


class QueueingInfo(msrest.serialization.Model):
"""QueueingInfo.
:ivar code:
:vartype code: str
:ivar message:
:vartype message: str
:ivar last_refresh_timestamp:
:vartype last_refresh_timestamp: ~datetime.datetime
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'last_refresh_timestamp': {'key': 'lastRefreshTimestamp', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword code:
:paramtype code: str
:keyword message:
:paramtype message: str
:keyword last_refresh_timestamp:
:paramtype last_refresh_timestamp: ~datetime.datetime
"""
super(QueueingInfo, self).__init__(**kwargs)
self.code = kwargs.get('code', None)
self.message = kwargs.get('message', None)
self.last_refresh_timestamp = kwargs.get('last_refresh_timestamp', None)


class RawComponentDto(msrest.serialization.Model):
"""RawComponentDto.
:ivar component_schema:
:vartype component_schema: str
:ivar is_anonymous:
:vartype is_anonymous: bool
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
:ivar type: Possible values include: "Unknown", "CommandComponent", "Command".
:vartype type: str or ~flow.models.ComponentType
:ivar component_type_version:
:vartype component_type_version: str
:ivar display_name:
:vartype display_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar is_deterministic:
:vartype is_deterministic: bool
:ivar successful_return_code:
:vartype successful_return_code: str
:ivar inputs: This is a dictionary.
:vartype inputs: dict[str, ~flow.models.ComponentInput]
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, ~flow.models.ComponentOutput]
:ivar command:
:vartype command: str
:ivar environment_name:
:vartype environment_name: str
:ivar environment_version:
:vartype environment_version: str
:ivar snapshot_id:
:vartype snapshot_id: str
:ivar created_by:
:vartype created_by: ~flow.models.SchemaContractsCreatedBy
:ivar last_modified_by:
:vartype last_modified_by: ~flow.models.SchemaContractsCreatedBy
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
:ivar component_internal_id:
:vartype component_internal_id: str
"""
_attribute_map = {
'component_schema': {'key': 'componentSchema', 'type': 'str'},
'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'component_type_version': {'key': 'componentTypeVersion', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'is_deterministic': {'key': 'isDeterministic', 'type': 'bool'},
'successful_return_code': {'key': 'successfulReturnCode', 'type': 'str'},
'inputs': {'key': 'inputs', 'type': '{ComponentInput}'},
'outputs': {'key': 'outputs', 'type': '{ComponentOutput}'},
'command': {'key': 'command', 'type': 'str'},
'environment_name': {'key': 'environmentName', 'type': 'str'},
'environment_version': {'key': 'environmentVersion', 'type': 'str'},
'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
'created_by': {'key': 'createdBy', 'type': 'SchemaContractsCreatedBy'},
'last_modified_by': {'key': 'lastModifiedBy', 'type': 'SchemaContractsCreatedBy'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
'component_internal_id': {'key': 'componentInternalId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword component_schema:
:paramtype component_schema: str
:keyword is_anonymous:
:paramtype is_anonymous: bool
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
:keyword type: Possible values include: "Unknown", "CommandComponent", "Command".
:paramtype type: str or ~flow.models.ComponentType
:keyword component_type_version:
:paramtype component_type_version: str
:keyword display_name:
:paramtype display_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword is_deterministic:
:paramtype is_deterministic: bool
:keyword successful_return_code:
:paramtype successful_return_code: str
:keyword inputs: This is a dictionary.
:paramtype inputs: dict[str, ~flow.models.ComponentInput]
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, ~flow.models.ComponentOutput]
:keyword command:
:paramtype command: str
:keyword environment_name:
:paramtype environment_name: str
:keyword environment_version:
:paramtype environment_version: str
:keyword snapshot_id:
:paramtype snapshot_id: str
:keyword created_by:
:paramtype created_by: ~flow.models.SchemaContractsCreatedBy
:keyword last_modified_by:
:paramtype last_modified_by: ~flow.models.SchemaContractsCreatedBy
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
:keyword component_internal_id:
:paramtype component_internal_id: str
"""
super(RawComponentDto, self).__init__(**kwargs)
self.component_schema = kwargs.get('component_schema', None)
self.is_anonymous = kwargs.get('is_anonymous', None)
self.name = kwargs.get('name', None)
self.version = kwargs.get('version', None)
self.type = kwargs.get('type', None)
self.component_type_version = kwargs.get('component_type_version', None)
self.display_name = kwargs.get('display_name', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
self.is_deterministic = kwargs.get('is_deterministic', None)
self.successful_return_code = kwargs.get('successful_return_code', None)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
self.command = kwargs.get('command', None)
self.environment_name = kwargs.get('environment_name', None)
self.environment_version = kwargs.get('environment_version', None)
self.snapshot_id = kwargs.get('snapshot_id', None)
self.created_by = kwargs.get('created_by', None)
self.last_modified_by = kwargs.get('last_modified_by', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
self.component_internal_id = kwargs.get('component_internal_id', None)
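
# Illustrative sketch (hypothetical component; the command syntax is only an
# example, not something this model prescribes): `inputs`/`outputs` hold
# ComponentInput / ComponentOutput models keyed by the port names the command
# refers to.
#
#   component = RawComponentDto(
#       name="train",
#       version="1",
#       type="CommandComponent",
#       command="python train.py --data ${{inputs.training_data}}",
#       is_deterministic=True,
#   )

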
class RayConfiguration(msrest.serialization.Model):
"""RayConfiguration.
:ivar port:
:vartype port: int
:ivar address:
:vartype address: str
:ivar include_dashboard:
:vartype include_dashboard: bool
:ivar dashboard_port:
:vartype dashboard_port: int
:ivar head_node_additional_args:
:vartype head_node_additional_args: str
:ivar worker_node_additional_args:
:vartype worker_node_additional_args: str
"""
_attribute_map = {
'port': {'key': 'port', 'type': 'int'},
'address': {'key': 'address', 'type': 'str'},
'include_dashboard': {'key': 'includeDashboard', 'type': 'bool'},
'dashboard_port': {'key': 'dashboardPort', 'type': 'int'},
'head_node_additional_args': {'key': 'headNodeAdditionalArgs', 'type': 'str'},
'worker_node_additional_args': {'key': 'workerNodeAdditionalArgs', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword port:
:paramtype port: int
:keyword address:
:paramtype address: str
:keyword include_dashboard:
:paramtype include_dashboard: bool
:keyword dashboard_port:
:paramtype dashboard_port: int
:keyword head_node_additional_args:
:paramtype head_node_additional_args: str
:keyword worker_node_additional_args:
:paramtype worker_node_additional_args: str
"""
super(RayConfiguration, self).__init__(**kwargs)
self.port = kwargs.get('port', None)
self.address = kwargs.get('address', None)
self.include_dashboard = kwargs.get('include_dashboard', None)
self.dashboard_port = kwargs.get('dashboard_port', None)
self.head_node_additional_args = kwargs.get('head_node_additional_args', None)
self.worker_node_additional_args = kwargs.get('worker_node_additional_args', None)
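
# Illustrative sketch (hypothetical values): a Ray head-node configuration with
# the dashboard on its own port; the additional-args strings are presumably
# forwarded to `ray start` as-is.
#
#   ray_cfg = RayConfiguration(
#       port=6379,
#       include_dashboard=True,
#       dashboard_port=8265,
#       head_node_additional_args="--object-store-memory 1000000000",
#   )

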
class RCranPackage(msrest.serialization.Model):
"""RCranPackage.
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
:ivar repository:
:vartype repository: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'repository': {'key': 'repository', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
:keyword repository:
:paramtype repository: str
"""
super(RCranPackage, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.version = kwargs.get('version', None)
self.repository = kwargs.get('repository', None)


class RealTimeEndpoint(msrest.serialization.Model):
"""RealTimeEndpoint.
:ivar created_by:
:vartype created_by: str
:ivar kv_tags: Dictionary of :code:`<string>`.
:vartype kv_tags: dict[str, str]
:ivar state: Possible values include: "Transitioning", "Healthy", "Unhealthy", "Failed",
"Unschedulable".
:vartype state: str or ~flow.models.WebServiceState
:ivar error:
:vartype error: ~flow.models.ModelManagementErrorResponse
:ivar compute_type: Possible values include: "ACI", "AKS", "AMLCOMPUTE", "IOT", "AKSENDPOINT",
"MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE", "UNKNOWN".
:vartype compute_type: str or ~flow.models.ComputeEnvironmentType
:ivar image_id:
:vartype image_id: str
:ivar cpu:
:vartype cpu: float
:ivar memory_in_gb:
:vartype memory_in_gb: float
:ivar max_concurrent_requests_per_container:
:vartype max_concurrent_requests_per_container: int
:ivar num_replicas:
:vartype num_replicas: int
:ivar event_hub_enabled:
:vartype event_hub_enabled: bool
:ivar storage_enabled:
:vartype storage_enabled: bool
:ivar app_insights_enabled:
:vartype app_insights_enabled: bool
:ivar auto_scale_enabled:
:vartype auto_scale_enabled: bool
:ivar min_replicas:
:vartype min_replicas: int
:ivar max_replicas:
:vartype max_replicas: int
:ivar target_utilization:
:vartype target_utilization: int
:ivar refresh_period_in_seconds:
:vartype refresh_period_in_seconds: int
:ivar scoring_uri:
:vartype scoring_uri: str
:ivar deployment_status:
:vartype deployment_status: ~flow.models.AKSReplicaStatus
:ivar scoring_timeout_ms:
:vartype scoring_timeout_ms: int
:ivar auth_enabled:
:vartype auth_enabled: bool
:ivar aad_auth_enabled:
:vartype aad_auth_enabled: bool
:ivar region:
:vartype region: str
:ivar primary_key:
:vartype primary_key: str
:ivar secondary_key:
:vartype secondary_key: str
:ivar swagger_uri:
:vartype swagger_uri: str
:ivar linked_pipeline_draft_id:
:vartype linked_pipeline_draft_id: str
:ivar linked_pipeline_run_id:
:vartype linked_pipeline_run_id: str
:ivar warning:
:vartype warning: str
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar id:
:vartype id: str
:ivar created_time:
:vartype created_time: ~datetime.datetime
:ivar updated_time:
:vartype updated_time: ~datetime.datetime
:ivar compute_name:
:vartype compute_name: str
:ivar updated_by:
:vartype updated_by: str
"""
_attribute_map = {
'created_by': {'key': 'createdBy', 'type': 'str'},
'kv_tags': {'key': 'kvTags', 'type': '{str}'},
'state': {'key': 'state', 'type': 'str'},
'error': {'key': 'error', 'type': 'ModelManagementErrorResponse'},
'compute_type': {'key': 'computeType', 'type': 'str'},
'image_id': {'key': 'imageId', 'type': 'str'},
'cpu': {'key': 'cpu', 'type': 'float'},
'memory_in_gb': {'key': 'memoryInGB', 'type': 'float'},
'max_concurrent_requests_per_container': {'key': 'maxConcurrentRequestsPerContainer', 'type': 'int'},
'num_replicas': {'key': 'numReplicas', 'type': 'int'},
'event_hub_enabled': {'key': 'eventHubEnabled', 'type': 'bool'},
'storage_enabled': {'key': 'storageEnabled', 'type': 'bool'},
'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
'auto_scale_enabled': {'key': 'autoScaleEnabled', 'type': 'bool'},
'min_replicas': {'key': 'minReplicas', 'type': 'int'},
'max_replicas': {'key': 'maxReplicas', 'type': 'int'},
'target_utilization': {'key': 'targetUtilization', 'type': 'int'},
'refresh_period_in_seconds': {'key': 'refreshPeriodInSeconds', 'type': 'int'},
'scoring_uri': {'key': 'scoringUri', 'type': 'str'},
'deployment_status': {'key': 'deploymentStatus', 'type': 'AKSReplicaStatus'},
'scoring_timeout_ms': {'key': 'scoringTimeoutMs', 'type': 'int'},
'auth_enabled': {'key': 'authEnabled', 'type': 'bool'},
'aad_auth_enabled': {'key': 'aadAuthEnabled', 'type': 'bool'},
'region': {'key': 'region', 'type': 'str'},
'primary_key': {'key': 'primaryKey', 'type': 'str'},
'secondary_key': {'key': 'secondaryKey', 'type': 'str'},
'swagger_uri': {'key': 'swaggerUri', 'type': 'str'},
'linked_pipeline_draft_id': {'key': 'linkedPipelineDraftId', 'type': 'str'},
'linked_pipeline_run_id': {'key': 'linkedPipelineRunId', 'type': 'str'},
'warning': {'key': 'warning', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
'updated_time': {'key': 'updatedTime', 'type': 'iso-8601'},
'compute_name': {'key': 'computeName', 'type': 'str'},
'updated_by': {'key': 'updatedBy', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword created_by:
:paramtype created_by: str
:keyword kv_tags: Dictionary of :code:`<string>`.
:paramtype kv_tags: dict[str, str]
:keyword state: Possible values include: "Transitioning", "Healthy", "Unhealthy", "Failed",
"Unschedulable".
:paramtype state: str or ~flow.models.WebServiceState
:keyword error:
:paramtype error: ~flow.models.ModelManagementErrorResponse
:keyword compute_type: Possible values include: "ACI", "AKS", "AMLCOMPUTE", "IOT",
"AKSENDPOINT", "MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE",
"UNKNOWN".
:paramtype compute_type: str or ~flow.models.ComputeEnvironmentType
:keyword image_id:
:paramtype image_id: str
:keyword cpu:
:paramtype cpu: float
:keyword memory_in_gb:
:paramtype memory_in_gb: float
:keyword max_concurrent_requests_per_container:
:paramtype max_concurrent_requests_per_container: int
:keyword num_replicas:
:paramtype num_replicas: int
:keyword event_hub_enabled:
:paramtype event_hub_enabled: bool
:keyword storage_enabled:
:paramtype storage_enabled: bool
:keyword app_insights_enabled:
:paramtype app_insights_enabled: bool
:keyword auto_scale_enabled:
:paramtype auto_scale_enabled: bool
:keyword min_replicas:
:paramtype min_replicas: int
:keyword max_replicas:
:paramtype max_replicas: int
:keyword target_utilization:
:paramtype target_utilization: int
:keyword refresh_period_in_seconds:
:paramtype refresh_period_in_seconds: int
:keyword scoring_uri:
:paramtype scoring_uri: str
:keyword deployment_status:
:paramtype deployment_status: ~flow.models.AKSReplicaStatus
:keyword scoring_timeout_ms:
:paramtype scoring_timeout_ms: int
:keyword auth_enabled:
:paramtype auth_enabled: bool
:keyword aad_auth_enabled:
:paramtype aad_auth_enabled: bool
:keyword region:
:paramtype region: str
:keyword primary_key:
:paramtype primary_key: str
:keyword secondary_key:
:paramtype secondary_key: str
:keyword swagger_uri:
:paramtype swagger_uri: str
:keyword linked_pipeline_draft_id:
:paramtype linked_pipeline_draft_id: str
:keyword linked_pipeline_run_id:
:paramtype linked_pipeline_run_id: str
:keyword warning:
:paramtype warning: str
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword id:
:paramtype id: str
:keyword created_time:
:paramtype created_time: ~datetime.datetime
:keyword updated_time:
:paramtype updated_time: ~datetime.datetime
:keyword compute_name:
:paramtype compute_name: str
:keyword updated_by:
:paramtype updated_by: str
"""
super(RealTimeEndpoint, self).__init__(**kwargs)
self.created_by = kwargs.get('created_by', None)
self.kv_tags = kwargs.get('kv_tags', None)
self.state = kwargs.get('state', None)
self.error = kwargs.get('error', None)
self.compute_type = kwargs.get('compute_type', None)
self.image_id = kwargs.get('image_id', None)
self.cpu = kwargs.get('cpu', None)
self.memory_in_gb = kwargs.get('memory_in_gb', None)
self.max_concurrent_requests_per_container = kwargs.get('max_concurrent_requests_per_container', None)
self.num_replicas = kwargs.get('num_replicas', None)
self.event_hub_enabled = kwargs.get('event_hub_enabled', None)
self.storage_enabled = kwargs.get('storage_enabled', None)
self.app_insights_enabled = kwargs.get('app_insights_enabled', None)
self.auto_scale_enabled = kwargs.get('auto_scale_enabled', None)
self.min_replicas = kwargs.get('min_replicas', None)
self.max_replicas = kwargs.get('max_replicas', None)
self.target_utilization = kwargs.get('target_utilization', None)
self.refresh_period_in_seconds = kwargs.get('refresh_period_in_seconds', None)
self.scoring_uri = kwargs.get('scoring_uri', None)
self.deployment_status = kwargs.get('deployment_status', None)
self.scoring_timeout_ms = kwargs.get('scoring_timeout_ms', None)
self.auth_enabled = kwargs.get('auth_enabled', None)
self.aad_auth_enabled = kwargs.get('aad_auth_enabled', None)
self.region = kwargs.get('region', None)
self.primary_key = kwargs.get('primary_key', None)
self.secondary_key = kwargs.get('secondary_key', None)
self.swagger_uri = kwargs.get('swagger_uri', None)
self.linked_pipeline_draft_id = kwargs.get('linked_pipeline_draft_id', None)
self.linked_pipeline_run_id = kwargs.get('linked_pipeline_run_id', None)
self.warning = kwargs.get('warning', None)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.id = kwargs.get('id', None)
self.created_time = kwargs.get('created_time', None)
self.updated_time = kwargs.get('updated_time', None)
self.compute_name = kwargs.get('compute_name', None)
self.updated_by = kwargs.get('updated_by', None)
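
# Illustrative sketch (hypothetical values): the autoscale fields act as a
# group; when auto_scale_enabled is True, min/max replicas and the target
# utilization bound the scaler, while num_replicas presumably covers the
# fixed-size case.
#
#   endpoint = RealTimeEndpoint(
#       name="scoring-endpoint",
#       compute_type="AKS",
#       cpu=1.0,
#       memory_in_gb=2.0,
#       auto_scale_enabled=True,
#       min_replicas=1,
#       max_replicas=4,
#       target_utilization=70,
#   )

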
class RealTimeEndpointInfo(msrest.serialization.Model):
"""RealTimeEndpointInfo.
:ivar web_service_inputs:
:vartype web_service_inputs: list[~flow.models.WebServicePort]
:ivar web_service_outputs:
:vartype web_service_outputs: list[~flow.models.WebServicePort]
:ivar deployments_info:
:vartype deployments_info: list[~flow.models.DeploymentInfo]
"""
_attribute_map = {
'web_service_inputs': {'key': 'webServiceInputs', 'type': '[WebServicePort]'},
'web_service_outputs': {'key': 'webServiceOutputs', 'type': '[WebServicePort]'},
'deployments_info': {'key': 'deploymentsInfo', 'type': '[DeploymentInfo]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword web_service_inputs:
:paramtype web_service_inputs: list[~flow.models.WebServicePort]
:keyword web_service_outputs:
:paramtype web_service_outputs: list[~flow.models.WebServicePort]
:keyword deployments_info:
:paramtype deployments_info: list[~flow.models.DeploymentInfo]
"""
super(RealTimeEndpointInfo, self).__init__(**kwargs)
self.web_service_inputs = kwargs.get('web_service_inputs', None)
self.web_service_outputs = kwargs.get('web_service_outputs', None)
self.deployments_info = kwargs.get('deployments_info', None)
class RealTimeEndpointStatus(msrest.serialization.Model):
"""RealTimeEndpointStatus.
:ivar last_operation: Possible values include: "Create", "Update", "Delete".
:vartype last_operation: str or ~flow.models.RealTimeEndpointOpCode
:ivar last_operation_status: Possible values include: "Ongoing", "Succeeded", "Failed",
"SucceededWithWarning".
:vartype last_operation_status: str or ~flow.models.RealTimeEndpointOpStatusCode
:ivar internal_step: Possible values include: "AboutToDeploy", "WaitAksComputeReady",
"RegisterModels", "CreateServiceFromModels", "UpdateServiceFromModels", "WaitServiceCreating",
"FetchServiceRelatedInfo", "TestWithSampleData", "AboutToDelete", "DeleteDeployment",
"DeleteAsset", "DeleteImage", "DeleteModel", "DeleteServiceRecord".
:vartype internal_step: str or ~flow.models.RealTimeEndpointInternalStepCode
:ivar status_detail:
:vartype status_detail: str
:ivar deployment_state:
:vartype deployment_state: str
:ivar service_id:
:vartype service_id: str
:ivar linked_pipeline_draft_id:
:vartype linked_pipeline_draft_id: str
"""
_attribute_map = {
'last_operation': {'key': 'lastOperation', 'type': 'str'},
'last_operation_status': {'key': 'lastOperationStatus', 'type': 'str'},
'internal_step': {'key': 'internalStep', 'type': 'str'},
'status_detail': {'key': 'statusDetail', 'type': 'str'},
'deployment_state': {'key': 'deploymentState', 'type': 'str'},
'service_id': {'key': 'serviceId', 'type': 'str'},
'linked_pipeline_draft_id': {'key': 'linkedPipelineDraftId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword last_operation: Possible values include: "Create", "Update", "Delete".
:paramtype last_operation: str or ~flow.models.RealTimeEndpointOpCode
:keyword last_operation_status: Possible values include: "Ongoing", "Succeeded", "Failed",
"SucceededWithWarning".
:paramtype last_operation_status: str or ~flow.models.RealTimeEndpointOpStatusCode
:keyword internal_step: Possible values include: "AboutToDeploy", "WaitAksComputeReady",
"RegisterModels", "CreateServiceFromModels", "UpdateServiceFromModels", "WaitServiceCreating",
"FetchServiceRelatedInfo", "TestWithSampleData", "AboutToDelete", "DeleteDeployment",
"DeleteAsset", "DeleteImage", "DeleteModel", "DeleteServiceRecord".
:paramtype internal_step: str or ~flow.models.RealTimeEndpointInternalStepCode
:keyword status_detail:
:paramtype status_detail: str
:keyword deployment_state:
:paramtype deployment_state: str
:keyword service_id:
:paramtype service_id: str
:keyword linked_pipeline_draft_id:
:paramtype linked_pipeline_draft_id: str
"""
super(RealTimeEndpointStatus, self).__init__(**kwargs)
self.last_operation = kwargs.get('last_operation', None)
self.last_operation_status = kwargs.get('last_operation_status', None)
self.internal_step = kwargs.get('internal_step', None)
self.status_detail = kwargs.get('status_detail', None)
self.deployment_state = kwargs.get('deployment_state', None)
self.service_id = kwargs.get('service_id', None)
self.linked_pipeline_draft_id = kwargs.get('linked_pipeline_draft_id', None)
class RealTimeEndpointSummary(msrest.serialization.Model):
"""RealTimeEndpointSummary.
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar id:
:vartype id: str
:ivar created_time:
:vartype created_time: ~datetime.datetime
:ivar updated_time:
:vartype updated_time: ~datetime.datetime
:ivar compute_type: Possible values include: "ACI", "AKS", "AMLCOMPUTE", "IOT", "AKSENDPOINT",
"MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE", "UNKNOWN".
:vartype compute_type: str or ~flow.models.ComputeEnvironmentType
:ivar compute_name:
:vartype compute_name: str
:ivar updated_by:
:vartype updated_by: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
'updated_time': {'key': 'updatedTime', 'type': 'iso-8601'},
'compute_type': {'key': 'computeType', 'type': 'str'},
'compute_name': {'key': 'computeName', 'type': 'str'},
'updated_by': {'key': 'updatedBy', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword id:
:paramtype id: str
:keyword created_time:
:paramtype created_time: ~datetime.datetime
:keyword updated_time:
:paramtype updated_time: ~datetime.datetime
:keyword compute_type: Possible values include: "ACI", "AKS", "AMLCOMPUTE", "IOT",
"AKSENDPOINT", "MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE",
"UNKNOWN".
:paramtype compute_type: str or ~flow.models.ComputeEnvironmentType
:keyword compute_name:
:paramtype compute_name: str
:keyword updated_by:
:paramtype updated_by: str
"""
super(RealTimeEndpointSummary, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.id = kwargs.get('id', None)
self.created_time = kwargs.get('created_time', None)
self.updated_time = kwargs.get('updated_time', None)
self.compute_type = kwargs.get('compute_type', None)
self.compute_name = kwargs.get('compute_name', None)
self.updated_by = kwargs.get('updated_by', None)
class RealTimeEndpointTestRequest(msrest.serialization.Model):
"""RealTimeEndpointTestRequest.
:ivar end_point:
:vartype end_point: str
:ivar auth_key:
:vartype auth_key: str
:ivar payload:
:vartype payload: str
"""
_attribute_map = {
'end_point': {'key': 'endPoint', 'type': 'str'},
'auth_key': {'key': 'authKey', 'type': 'str'},
'payload': {'key': 'payload', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword end_point:
:paramtype end_point: str
:keyword auth_key:
:paramtype auth_key: str
:keyword payload:
:paramtype payload: str
"""
super(RealTimeEndpointTestRequest, self).__init__(**kwargs)
self.end_point = kwargs.get('end_point', None)
self.auth_key = kwargs.get('auth_key', None)
self.payload = kwargs.get('payload', None)
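# Illustrative sketch (hand-written example, not generator output): building a
# test request and serializing it with msrest's inherited Model.serialize(),
# which maps attribute names to the wire keys in _attribute_map
# (e.g. end_point -> endPoint). The URL and payload below are placeholders.
#
#     request = RealTimeEndpointTestRequest(
#         end_point="https://example.invalid/score",  # hypothetical endpoint URL
#         auth_key="<key>",
#         payload='{"data": [1, 2, 3]}',
#     )
#     body = request.serialize()  # {'endPoint': ..., 'authKey': ..., 'payload': ...}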
class Recurrence(msrest.serialization.Model):
"""Recurrence.
:ivar frequency: Possible values include: "Month", "Week", "Day", "Hour", "Minute".
:vartype frequency: str or ~flow.models.Frequency
:ivar interval:
:vartype interval: int
:ivar schedule:
:vartype schedule: ~flow.models.RecurrenceSchedule
:ivar end_time:
:vartype end_time: str
:ivar start_time:
:vartype start_time: str
:ivar time_zone:
:vartype time_zone: str
"""
_attribute_map = {
'frequency': {'key': 'frequency', 'type': 'str'},
'interval': {'key': 'interval', 'type': 'int'},
'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'},
'end_time': {'key': 'endTime', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'str'},
'time_zone': {'key': 'timeZone', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword frequency: Possible values include: "Month", "Week", "Day", "Hour", "Minute".
:paramtype frequency: str or ~flow.models.Frequency
:keyword interval:
:paramtype interval: int
:keyword schedule:
:paramtype schedule: ~flow.models.RecurrenceSchedule
:keyword end_time:
:paramtype end_time: str
:keyword start_time:
:paramtype start_time: str
:keyword time_zone:
:paramtype time_zone: str
"""
super(Recurrence, self).__init__(**kwargs)
self.frequency = kwargs.get('frequency', None)
self.interval = kwargs.get('interval', None)
self.schedule = kwargs.get('schedule', None)
self.end_time = kwargs.get('end_time', None)
self.start_time = kwargs.get('start_time', None)
self.time_zone = kwargs.get('time_zone', None)
class RecurrencePattern(msrest.serialization.Model):
"""RecurrencePattern.
:ivar hours:
:vartype hours: list[int]
:ivar minutes:
:vartype minutes: list[int]
:ivar weekdays:
:vartype weekdays: list[str or ~flow.models.Weekday]
"""
_attribute_map = {
'hours': {'key': 'hours', 'type': '[int]'},
'minutes': {'key': 'minutes', 'type': '[int]'},
'weekdays': {'key': 'weekdays', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword hours:
:paramtype hours: list[int]
:keyword minutes:
:paramtype minutes: list[int]
:keyword weekdays:
:paramtype weekdays: list[str or ~flow.models.Weekday]
"""
super(RecurrencePattern, self).__init__(**kwargs)
self.hours = kwargs.get('hours', None)
self.minutes = kwargs.get('minutes', None)
self.weekdays = kwargs.get('weekdays', None)
class RecurrenceSchedule(msrest.serialization.Model):
"""RecurrenceSchedule.
:ivar hours:
:vartype hours: list[int]
:ivar minutes:
:vartype minutes: list[int]
:ivar week_days:
:vartype week_days: list[str or ~flow.models.WeekDays]
:ivar month_days:
:vartype month_days: list[int]
"""
_attribute_map = {
'hours': {'key': 'hours', 'type': '[int]'},
'minutes': {'key': 'minutes', 'type': '[int]'},
'week_days': {'key': 'weekDays', 'type': '[str]'},
'month_days': {'key': 'monthDays', 'type': '[int]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword hours:
:paramtype hours: list[int]
:keyword minutes:
:paramtype minutes: list[int]
:keyword week_days:
:paramtype week_days: list[str or ~flow.models.WeekDays]
:keyword month_days:
:paramtype month_days: list[int]
"""
super(RecurrenceSchedule, self).__init__(**kwargs)
self.hours = kwargs.get('hours', None)
self.minutes = kwargs.get('minutes', None)
self.week_days = kwargs.get('week_days', None)
self.month_days = kwargs.get('month_days', None)
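# Illustrative sketch (hand-written, not generator output): a Recurrence that
# fires daily at 00:30 UTC. The enum strings follow the "Possible values"
# lists in the docstrings above; the concrete values are assumptions.
#
#     schedule = RecurrenceSchedule(hours=[0], minutes=[30])
#     recurrence = Recurrence(
#         frequency="Day",
#         interval=1,
#         schedule=schedule,
#         start_time="2024-01-01T00:00:00",
#         time_zone="UTC",
#     )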
class RegenerateServiceKeysRequest(msrest.serialization.Model):
"""RegenerateServiceKeysRequest.
:ivar key_type: Possible values include: "Primary", "Secondary".
:vartype key_type: str or ~flow.models.KeyType
:ivar key_value:
:vartype key_value: str
"""
_attribute_map = {
'key_type': {'key': 'keyType', 'type': 'str'},
'key_value': {'key': 'keyValue', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword key_type: Possible values include: "Primary", "Secondary".
:paramtype key_type: str or ~flow.models.KeyType
:keyword key_value:
:paramtype key_value: str
"""
super(RegenerateServiceKeysRequest, self).__init__(**kwargs)
self.key_type = kwargs.get('key_type', None)
self.key_value = kwargs.get('key_value', None)
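# Illustrative sketch (hand-written): requesting regeneration of the primary
# key. "Primary"/"Secondary" are the documented KeyType values; whether
# key_value may be omitted so the service generates one is an assumption here.
#
#     request = RegenerateServiceKeysRequest(key_type="Primary")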
class RegisterComponentMetaInfo(msrest.serialization.Model):
"""RegisterComponentMetaInfo.
:ivar aml_module_name:
:vartype aml_module_name: str
:ivar name_only_display_info:
:vartype name_only_display_info: str
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
:ivar module_version_id:
:vartype module_version_id: str
:ivar snapshot_id:
:vartype snapshot_id: str
:ivar component_registration_type: Possible values include: "Normal", "AnonymousAmlModule",
"AnonymousAmlModuleVersion", "ModuleEntityOnly".
:vartype component_registration_type: str or ~flow.models.ComponentRegistrationTypeEnum
:ivar module_entity_from_yaml:
:vartype module_entity_from_yaml: ~flow.models.ModuleEntity
:ivar set_as_default_version:
:vartype set_as_default_version: bool
:ivar data_types_from_yaml:
:vartype data_types_from_yaml: list[~flow.models.DataTypeCreationInfo]
:ivar data_type_mechanism: Possible values include: "ErrorWhenNotExisting",
"RegisterWhenNotExisting", "RegisterBuildinDataTypeOnly".
:vartype data_type_mechanism: str or ~flow.models.DataTypeMechanism
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar identifier_hashes:
:vartype identifier_hashes: ~flow.models.RegisterComponentMetaInfoIdentifierHashes
:ivar content_hash:
:vartype content_hash: str
:ivar extra_hash:
:vartype extra_hash: str
:ivar extra_hashes:
:vartype extra_hashes: ~flow.models.RegisterComponentMetaInfoExtraHashes
:ivar registration:
:vartype registration: bool
:ivar validate_only:
:vartype validate_only: bool
:ivar skip_workspace_related_check:
:vartype skip_workspace_related_check: bool
:ivar intellectual_property_protected_workspace_component_registration_allowed_publisher:
:vartype intellectual_property_protected_workspace_component_registration_allowed_publisher:
list[str]
:ivar system_managed_registration:
:vartype system_managed_registration: bool
:ivar allow_dup_name_between_input_and_ouput_port:
:vartype allow_dup_name_between_input_and_ouput_port: bool
:ivar module_source:
:vartype module_source: str
:ivar module_scope:
:vartype module_scope: str
:ivar module_additional_includes_count:
:vartype module_additional_includes_count: int
:ivar module_os_type:
:vartype module_os_type: str
:ivar module_codegen_by:
:vartype module_codegen_by: str
:ivar module_client_source:
:vartype module_client_source: str
:ivar module_is_builtin:
:vartype module_is_builtin: bool
:ivar module_register_event_extension_fields: Dictionary of :code:`<string>`.
:vartype module_register_event_extension_fields: dict[str, str]
"""
_attribute_map = {
'aml_module_name': {'key': 'amlModuleName', 'type': 'str'},
'name_only_display_info': {'key': 'nameOnlyDisplayInfo', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'module_version_id': {'key': 'moduleVersionId', 'type': 'str'},
'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
'component_registration_type': {'key': 'componentRegistrationType', 'type': 'str'},
'module_entity_from_yaml': {'key': 'moduleEntityFromYaml', 'type': 'ModuleEntity'},
'set_as_default_version': {'key': 'setAsDefaultVersion', 'type': 'bool'},
'data_types_from_yaml': {'key': 'dataTypesFromYaml', 'type': '[DataTypeCreationInfo]'},
'data_type_mechanism': {'key': 'dataTypeMechanism', 'type': 'str'},
'identifier_hash': {'key': 'identifierHash', 'type': 'str'},
'identifier_hashes': {'key': 'identifierHashes', 'type': 'RegisterComponentMetaInfoIdentifierHashes'},
'content_hash': {'key': 'contentHash', 'type': 'str'},
'extra_hash': {'key': 'extraHash', 'type': 'str'},
'extra_hashes': {'key': 'extraHashes', 'type': 'RegisterComponentMetaInfoExtraHashes'},
'registration': {'key': 'registration', 'type': 'bool'},
'validate_only': {'key': 'validateOnly', 'type': 'bool'},
'skip_workspace_related_check': {'key': 'skipWorkspaceRelatedCheck', 'type': 'bool'},
'intellectual_property_protected_workspace_component_registration_allowed_publisher': {'key': 'intellectualPropertyProtectedWorkspaceComponentRegistrationAllowedPublisher', 'type': '[str]'},
'system_managed_registration': {'key': 'systemManagedRegistration', 'type': 'bool'},
'allow_dup_name_between_input_and_ouput_port': {'key': 'allowDupNameBetweenInputAndOuputPort', 'type': 'bool'},
'module_source': {'key': 'moduleSource', 'type': 'str'},
'module_scope': {'key': 'moduleScope', 'type': 'str'},
'module_additional_includes_count': {'key': 'moduleAdditionalIncludesCount', 'type': 'int'},
'module_os_type': {'key': 'moduleOSType', 'type': 'str'},
'module_codegen_by': {'key': 'moduleCodegenBy', 'type': 'str'},
'module_client_source': {'key': 'moduleClientSource', 'type': 'str'},
'module_is_builtin': {'key': 'moduleIsBuiltin', 'type': 'bool'},
'module_register_event_extension_fields': {'key': 'moduleRegisterEventExtensionFields', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword aml_module_name:
:paramtype aml_module_name: str
:keyword name_only_display_info:
:paramtype name_only_display_info: str
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
:keyword module_version_id:
:paramtype module_version_id: str
:keyword snapshot_id:
:paramtype snapshot_id: str
:keyword component_registration_type: Possible values include: "Normal", "AnonymousAmlModule",
"AnonymousAmlModuleVersion", "ModuleEntityOnly".
:paramtype component_registration_type: str or ~flow.models.ComponentRegistrationTypeEnum
:keyword module_entity_from_yaml:
:paramtype module_entity_from_yaml: ~flow.models.ModuleEntity
:keyword set_as_default_version:
:paramtype set_as_default_version: bool
:keyword data_types_from_yaml:
:paramtype data_types_from_yaml: list[~flow.models.DataTypeCreationInfo]
:keyword data_type_mechanism: Possible values include: "ErrorWhenNotExisting",
"RegisterWhenNotExisting", "RegisterBuildinDataTypeOnly".
:paramtype data_type_mechanism: str or ~flow.models.DataTypeMechanism
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword identifier_hashes:
:paramtype identifier_hashes: ~flow.models.RegisterComponentMetaInfoIdentifierHashes
:keyword content_hash:
:paramtype content_hash: str
:keyword extra_hash:
:paramtype extra_hash: str
:keyword extra_hashes:
:paramtype extra_hashes: ~flow.models.RegisterComponentMetaInfoExtraHashes
:keyword registration:
:paramtype registration: bool
:keyword validate_only:
:paramtype validate_only: bool
:keyword skip_workspace_related_check:
:paramtype skip_workspace_related_check: bool
:keyword intellectual_property_protected_workspace_component_registration_allowed_publisher:
:paramtype intellectual_property_protected_workspace_component_registration_allowed_publisher:
list[str]
:keyword system_managed_registration:
:paramtype system_managed_registration: bool
:keyword allow_dup_name_between_input_and_ouput_port:
:paramtype allow_dup_name_between_input_and_ouput_port: bool
:keyword module_source:
:paramtype module_source: str
:keyword module_scope:
:paramtype module_scope: str
:keyword module_additional_includes_count:
:paramtype module_additional_includes_count: int
:keyword module_os_type:
:paramtype module_os_type: str
:keyword module_codegen_by:
:paramtype module_codegen_by: str
:keyword module_client_source:
:paramtype module_client_source: str
:keyword module_is_builtin:
:paramtype module_is_builtin: bool
:keyword module_register_event_extension_fields: Dictionary of :code:`<string>`.
:paramtype module_register_event_extension_fields: dict[str, str]
"""
super(RegisterComponentMetaInfo, self).__init__(**kwargs)
self.aml_module_name = kwargs.get('aml_module_name', None)
self.name_only_display_info = kwargs.get('name_only_display_info', None)
self.name = kwargs.get('name', None)
self.version = kwargs.get('version', None)
self.module_version_id = kwargs.get('module_version_id', None)
self.snapshot_id = kwargs.get('snapshot_id', None)
self.component_registration_type = kwargs.get('component_registration_type', None)
self.module_entity_from_yaml = kwargs.get('module_entity_from_yaml', None)
self.set_as_default_version = kwargs.get('set_as_default_version', None)
self.data_types_from_yaml = kwargs.get('data_types_from_yaml', None)
self.data_type_mechanism = kwargs.get('data_type_mechanism', None)
self.identifier_hash = kwargs.get('identifier_hash', None)
self.identifier_hashes = kwargs.get('identifier_hashes', None)
self.content_hash = kwargs.get('content_hash', None)
self.extra_hash = kwargs.get('extra_hash', None)
self.extra_hashes = kwargs.get('extra_hashes', None)
self.registration = kwargs.get('registration', None)
self.validate_only = kwargs.get('validate_only', None)
self.skip_workspace_related_check = kwargs.get('skip_workspace_related_check', None)
self.intellectual_property_protected_workspace_component_registration_allowed_publisher = kwargs.get('intellectual_property_protected_workspace_component_registration_allowed_publisher', None)
self.system_managed_registration = kwargs.get('system_managed_registration', None)
self.allow_dup_name_between_input_and_ouput_port = kwargs.get('allow_dup_name_between_input_and_ouput_port', None)
self.module_source = kwargs.get('module_source', None)
self.module_scope = kwargs.get('module_scope', None)
self.module_additional_includes_count = kwargs.get('module_additional_includes_count', None)
self.module_os_type = kwargs.get('module_os_type', None)
self.module_codegen_by = kwargs.get('module_codegen_by', None)
self.module_client_source = kwargs.get('module_client_source', None)
self.module_is_builtin = kwargs.get('module_is_builtin', None)
self.module_register_event_extension_fields = kwargs.get('module_register_event_extension_fields', None)
class RegisterComponentMetaInfoExtraHashes(msrest.serialization.Model):
"""RegisterComponentMetaInfoExtraHashes.
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar identifier_hash_v2:
:vartype identifier_hash_v2: str
"""
_attribute_map = {
'identifier_hash': {'key': 'IdentifierHash', 'type': 'str'},
'identifier_hash_v2': {'key': 'IdentifierHashV2', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword identifier_hash_v2:
:paramtype identifier_hash_v2: str
"""
super(RegisterComponentMetaInfoExtraHashes, self).__init__(**kwargs)
self.identifier_hash = kwargs.get('identifier_hash', None)
self.identifier_hash_v2 = kwargs.get('identifier_hash_v2', None)
class RegisterComponentMetaInfoIdentifierHashes(msrest.serialization.Model):
"""RegisterComponentMetaInfoIdentifierHashes.
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar identifier_hash_v2:
:vartype identifier_hash_v2: str
"""
_attribute_map = {
'identifier_hash': {'key': 'IdentifierHash', 'type': 'str'},
'identifier_hash_v2': {'key': 'IdentifierHashV2', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword identifier_hash_v2:
:paramtype identifier_hash_v2: str
"""
super(RegisterComponentMetaInfoIdentifierHashes, self).__init__(**kwargs)
self.identifier_hash = kwargs.get('identifier_hash', None)
self.identifier_hash_v2 = kwargs.get('identifier_hash_v2', None)
class RegisteredDataSetReference(msrest.serialization.Model):
"""RegisteredDataSetReference.
:ivar id:
:vartype id: str
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
"""
super(RegisteredDataSetReference, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.name = kwargs.get('name', None)
self.version = kwargs.get('version', None)
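# Illustrative sketch (hand-written): msrest models can also be built from a
# wire-format dict via the inherited Model.deserialize classmethod, which reads
# the camelCase keys declared in _attribute_map. Values below are placeholders.
#
#     payload = {"id": "azureml:ds-123", "name": "training-data", "version": "2"}
#     dataset_ref = RegisteredDataSetReference.deserialize(payload)
#     assert dataset_ref.version == "2"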
class RegisterRegistryComponentMetaInfo(msrest.serialization.Model):
"""RegisterRegistryComponentMetaInfo.
:ivar registry_name:
:vartype registry_name: str
:ivar intellectual_property_publisher_information:
:vartype intellectual_property_publisher_information:
~flow.models.IntellectualPropertyPublisherInformation
:ivar blob_reference_data: This is a dictionary.
:vartype blob_reference_data: dict[str, ~flow.models.RegistryBlobReferenceData]
:ivar aml_module_name:
:vartype aml_module_name: str
:ivar name_only_display_info:
:vartype name_only_display_info: str
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
:ivar module_version_id:
:vartype module_version_id: str
:ivar snapshot_id:
:vartype snapshot_id: str
:ivar component_registration_type: Possible values include: "Normal", "AnonymousAmlModule",
"AnonymousAmlModuleVersion", "ModuleEntityOnly".
:vartype component_registration_type: str or ~flow.models.ComponentRegistrationTypeEnum
:ivar module_entity_from_yaml:
:vartype module_entity_from_yaml: ~flow.models.ModuleEntity
:ivar set_as_default_version:
:vartype set_as_default_version: bool
:ivar data_types_from_yaml:
:vartype data_types_from_yaml: list[~flow.models.DataTypeCreationInfo]
:ivar data_type_mechanism: Possible values include: "ErrorWhenNotExisting",
"RegisterWhenNotExisting", "RegisterBuildinDataTypeOnly".
:vartype data_type_mechanism: str or ~flow.models.DataTypeMechanism
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar identifier_hashes:
:vartype identifier_hashes: ~flow.models.RegisterRegistryComponentMetaInfoIdentifierHashes
:ivar content_hash:
:vartype content_hash: str
:ivar extra_hash:
:vartype extra_hash: str
:ivar extra_hashes:
:vartype extra_hashes: ~flow.models.RegisterRegistryComponentMetaInfoExtraHashes
:ivar registration:
:vartype registration: bool
:ivar validate_only:
:vartype validate_only: bool
:ivar skip_workspace_related_check:
:vartype skip_workspace_related_check: bool
:ivar intellectual_property_protected_workspace_component_registration_allowed_publisher:
:vartype intellectual_property_protected_workspace_component_registration_allowed_publisher:
list[str]
:ivar system_managed_registration:
:vartype system_managed_registration: bool
:ivar allow_dup_name_between_input_and_ouput_port:
:vartype allow_dup_name_between_input_and_ouput_port: bool
:ivar module_source:
:vartype module_source: str
:ivar module_scope:
:vartype module_scope: str
:ivar module_additional_includes_count:
:vartype module_additional_includes_count: int
:ivar module_os_type:
:vartype module_os_type: str
:ivar module_codegen_by:
:vartype module_codegen_by: str
:ivar module_client_source:
:vartype module_client_source: str
:ivar module_is_builtin:
:vartype module_is_builtin: bool
:ivar module_register_event_extension_fields: Dictionary of :code:`<string>`.
:vartype module_register_event_extension_fields: dict[str, str]
"""
_attribute_map = {
'registry_name': {'key': 'registryName', 'type': 'str'},
'intellectual_property_publisher_information': {'key': 'intellectualPropertyPublisherInformation', 'type': 'IntellectualPropertyPublisherInformation'},
'blob_reference_data': {'key': 'blobReferenceData', 'type': '{RegistryBlobReferenceData}'},
'aml_module_name': {'key': 'amlModuleName', 'type': 'str'},
'name_only_display_info': {'key': 'nameOnlyDisplayInfo', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'module_version_id': {'key': 'moduleVersionId', 'type': 'str'},
'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
'component_registration_type': {'key': 'componentRegistrationType', 'type': 'str'},
'module_entity_from_yaml': {'key': 'moduleEntityFromYaml', 'type': 'ModuleEntity'},
'set_as_default_version': {'key': 'setAsDefaultVersion', 'type': 'bool'},
'data_types_from_yaml': {'key': 'dataTypesFromYaml', 'type': '[DataTypeCreationInfo]'},
'data_type_mechanism': {'key': 'dataTypeMechanism', 'type': 'str'},
'identifier_hash': {'key': 'identifierHash', 'type': 'str'},
'identifier_hashes': {'key': 'identifierHashes', 'type': 'RegisterRegistryComponentMetaInfoIdentifierHashes'},
'content_hash': {'key': 'contentHash', 'type': 'str'},
'extra_hash': {'key': 'extraHash', 'type': 'str'},
'extra_hashes': {'key': 'extraHashes', 'type': 'RegisterRegistryComponentMetaInfoExtraHashes'},
'registration': {'key': 'registration', 'type': 'bool'},
'validate_only': {'key': 'validateOnly', 'type': 'bool'},
'skip_workspace_related_check': {'key': 'skipWorkspaceRelatedCheck', 'type': 'bool'},
'intellectual_property_protected_workspace_component_registration_allowed_publisher': {'key': 'intellectualPropertyProtectedWorkspaceComponentRegistrationAllowedPublisher', 'type': '[str]'},
'system_managed_registration': {'key': 'systemManagedRegistration', 'type': 'bool'},
'allow_dup_name_between_input_and_ouput_port': {'key': 'allowDupNameBetweenInputAndOuputPort', 'type': 'bool'},
'module_source': {'key': 'moduleSource', 'type': 'str'},
'module_scope': {'key': 'moduleScope', 'type': 'str'},
'module_additional_includes_count': {'key': 'moduleAdditionalIncludesCount', 'type': 'int'},
'module_os_type': {'key': 'moduleOSType', 'type': 'str'},
'module_codegen_by': {'key': 'moduleCodegenBy', 'type': 'str'},
'module_client_source': {'key': 'moduleClientSource', 'type': 'str'},
'module_is_builtin': {'key': 'moduleIsBuiltin', 'type': 'bool'},
'module_register_event_extension_fields': {'key': 'moduleRegisterEventExtensionFields', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword registry_name:
:paramtype registry_name: str
:keyword intellectual_property_publisher_information:
:paramtype intellectual_property_publisher_information:
~flow.models.IntellectualPropertyPublisherInformation
:keyword blob_reference_data: This is a dictionary.
:paramtype blob_reference_data: dict[str, ~flow.models.RegistryBlobReferenceData]
:keyword aml_module_name:
:paramtype aml_module_name: str
:keyword name_only_display_info:
:paramtype name_only_display_info: str
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
:keyword module_version_id:
:paramtype module_version_id: str
:keyword snapshot_id:
:paramtype snapshot_id: str
:keyword component_registration_type: Possible values include: "Normal", "AnonymousAmlModule",
"AnonymousAmlModuleVersion", "ModuleEntityOnly".
:paramtype component_registration_type: str or ~flow.models.ComponentRegistrationTypeEnum
:keyword module_entity_from_yaml:
:paramtype module_entity_from_yaml: ~flow.models.ModuleEntity
:keyword set_as_default_version:
:paramtype set_as_default_version: bool
:keyword data_types_from_yaml:
:paramtype data_types_from_yaml: list[~flow.models.DataTypeCreationInfo]
:keyword data_type_mechanism: Possible values include: "ErrorWhenNotExisting",
"RegisterWhenNotExisting", "RegisterBuildinDataTypeOnly".
:paramtype data_type_mechanism: str or ~flow.models.DataTypeMechanism
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword identifier_hashes:
:paramtype identifier_hashes: ~flow.models.RegisterRegistryComponentMetaInfoIdentifierHashes
:keyword content_hash:
:paramtype content_hash: str
:keyword extra_hash:
:paramtype extra_hash: str
:keyword extra_hashes:
:paramtype extra_hashes: ~flow.models.RegisterRegistryComponentMetaInfoExtraHashes
:keyword registration:
:paramtype registration: bool
:keyword validate_only:
:paramtype validate_only: bool
:keyword skip_workspace_related_check:
:paramtype skip_workspace_related_check: bool
:keyword intellectual_property_protected_workspace_component_registration_allowed_publisher:
:paramtype intellectual_property_protected_workspace_component_registration_allowed_publisher:
list[str]
:keyword system_managed_registration:
:paramtype system_managed_registration: bool
:keyword allow_dup_name_between_input_and_ouput_port:
:paramtype allow_dup_name_between_input_and_ouput_port: bool
:keyword module_source:
:paramtype module_source: str
:keyword module_scope:
:paramtype module_scope: str
:keyword module_additional_includes_count:
:paramtype module_additional_includes_count: int
:keyword module_os_type:
:paramtype module_os_type: str
:keyword module_codegen_by:
:paramtype module_codegen_by: str
:keyword module_client_source:
:paramtype module_client_source: str
:keyword module_is_builtin:
:paramtype module_is_builtin: bool
:keyword module_register_event_extension_fields: Dictionary of :code:`<string>`.
:paramtype module_register_event_extension_fields: dict[str, str]
"""
super(RegisterRegistryComponentMetaInfo, self).__init__(**kwargs)
self.registry_name = kwargs.get('registry_name', None)
self.intellectual_property_publisher_information = kwargs.get('intellectual_property_publisher_information', None)
self.blob_reference_data = kwargs.get('blob_reference_data', None)
self.aml_module_name = kwargs.get('aml_module_name', None)
self.name_only_display_info = kwargs.get('name_only_display_info', None)
self.name = kwargs.get('name', None)
self.version = kwargs.get('version', None)
self.module_version_id = kwargs.get('module_version_id', None)
self.snapshot_id = kwargs.get('snapshot_id', None)
self.component_registration_type = kwargs.get('component_registration_type', None)
self.module_entity_from_yaml = kwargs.get('module_entity_from_yaml', None)
self.set_as_default_version = kwargs.get('set_as_default_version', None)
self.data_types_from_yaml = kwargs.get('data_types_from_yaml', None)
self.data_type_mechanism = kwargs.get('data_type_mechanism', None)
self.identifier_hash = kwargs.get('identifier_hash', None)
self.identifier_hashes = kwargs.get('identifier_hashes', None)
self.content_hash = kwargs.get('content_hash', None)
self.extra_hash = kwargs.get('extra_hash', None)
self.extra_hashes = kwargs.get('extra_hashes', None)
self.registration = kwargs.get('registration', None)
self.validate_only = kwargs.get('validate_only', None)
self.skip_workspace_related_check = kwargs.get('skip_workspace_related_check', None)
self.intellectual_property_protected_workspace_component_registration_allowed_publisher = kwargs.get('intellectual_property_protected_workspace_component_registration_allowed_publisher', None)
self.system_managed_registration = kwargs.get('system_managed_registration', None)
self.allow_dup_name_between_input_and_ouput_port = kwargs.get('allow_dup_name_between_input_and_ouput_port', None)
self.module_source = kwargs.get('module_source', None)
self.module_scope = kwargs.get('module_scope', None)
self.module_additional_includes_count = kwargs.get('module_additional_includes_count', None)
self.module_os_type = kwargs.get('module_os_type', None)
self.module_codegen_by = kwargs.get('module_codegen_by', None)
self.module_client_source = kwargs.get('module_client_source', None)
self.module_is_builtin = kwargs.get('module_is_builtin', None)
self.module_register_event_extension_fields = kwargs.get('module_register_event_extension_fields', None)
class RegisterRegistryComponentMetaInfoExtraHashes(msrest.serialization.Model):
"""RegisterRegistryComponentMetaInfoExtraHashes.
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar identifier_hash_v2:
:vartype identifier_hash_v2: str
"""
_attribute_map = {
'identifier_hash': {'key': 'IdentifierHash', 'type': 'str'},
'identifier_hash_v2': {'key': 'IdentifierHashV2', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword identifier_hash_v2:
:paramtype identifier_hash_v2: str
"""
super(RegisterRegistryComponentMetaInfoExtraHashes, self).__init__(**kwargs)
self.identifier_hash = kwargs.get('identifier_hash', None)
self.identifier_hash_v2 = kwargs.get('identifier_hash_v2', None)
class RegisterRegistryComponentMetaInfoIdentifierHashes(msrest.serialization.Model):
"""RegisterRegistryComponentMetaInfoIdentifierHashes.
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar identifier_hash_v2:
:vartype identifier_hash_v2: str
"""
_attribute_map = {
'identifier_hash': {'key': 'IdentifierHash', 'type': 'str'},
'identifier_hash_v2': {'key': 'IdentifierHashV2', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword identifier_hash_v2:
:paramtype identifier_hash_v2: str
"""
super(RegisterRegistryComponentMetaInfoIdentifierHashes, self).__init__(**kwargs)
self.identifier_hash = kwargs.get('identifier_hash', None)
self.identifier_hash_v2 = kwargs.get('identifier_hash_v2', None)
class RegistrationOptions(msrest.serialization.Model):
"""RegistrationOptions.
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar properties: Dictionary of :code:`<string>`.
:vartype properties: dict[str, str]
:ivar dataset_registration_options:
:vartype dataset_registration_options: ~flow.models.DatasetRegistrationOptions
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'dataset_registration_options': {'key': 'datasetRegistrationOptions', 'type': 'DatasetRegistrationOptions'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword properties: Dictionary of :code:`<string>`.
:paramtype properties: dict[str, str]
:keyword dataset_registration_options:
:paramtype dataset_registration_options: ~flow.models.DatasetRegistrationOptions
"""
super(RegistrationOptions, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.version = kwargs.get('version', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
self.dataset_registration_options = kwargs.get('dataset_registration_options', None)
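# Illustrative sketch (hand-written): registration options with free-form tags
# and properties. Both are plain str -> str dictionaries per the docstring; the
# specific keys here are assumptions for demonstration.
#
#     options = RegistrationOptions(
#         name="my-dataset",
#         version="1",
#         description="Curated training split",
#         tags={"stage": "dev"},
#         properties={"source": "adls"},
#     )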
class RegistryBlobReferenceData(msrest.serialization.Model):
"""RegistryBlobReferenceData.
:ivar data_reference_id:
:vartype data_reference_id: str
:ivar data:
:vartype data: str
"""
_attribute_map = {
'data_reference_id': {'key': 'dataReferenceId', 'type': 'str'},
'data': {'key': 'data', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_reference_id:
:paramtype data_reference_id: str
:keyword data:
:paramtype data: str
"""
super(RegistryBlobReferenceData, self).__init__(**kwargs)
self.data_reference_id = kwargs.get('data_reference_id', None)
self.data = kwargs.get('data', None)
class RegistryIdentity(msrest.serialization.Model):
"""RegistryIdentity.
:ivar resource_id:
:vartype resource_id: str
:ivar client_id:
:vartype client_id: str
"""
_attribute_map = {
'resource_id': {'key': 'resourceId', 'type': 'str'},
'client_id': {'key': 'clientId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword resource_id:
:paramtype resource_id: str
:keyword client_id:
:paramtype client_id: str
"""
super(RegistryIdentity, self).__init__(**kwargs)
self.resource_id = kwargs.get('resource_id', None)
self.client_id = kwargs.get('client_id', None)
class Relationship(msrest.serialization.Model):
"""Relationship.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar relation_type:
:vartype relation_type: str
:ivar target_entity_id:
:vartype target_entity_id: str
:ivar asset_id:
:vartype asset_id: str
:ivar entity_type:
:vartype entity_type: str
:ivar direction:
:vartype direction: str
:ivar entity_container_id:
:vartype entity_container_id: str
"""
_validation = {
'entity_type': {'readonly': True},
'entity_container_id': {'readonly': True},
}
_attribute_map = {
'relation_type': {'key': 'relationType', 'type': 'str'},
'target_entity_id': {'key': 'targetEntityId', 'type': 'str'},
'asset_id': {'key': 'assetId', 'type': 'str'},
'entity_type': {'key': 'entityType', 'type': 'str'},
'direction': {'key': 'direction', 'type': 'str'},
'entity_container_id': {'key': 'entityContainerId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword relation_type:
:paramtype relation_type: str
:keyword target_entity_id:
:paramtype target_entity_id: str
:keyword asset_id:
:paramtype asset_id: str
:keyword direction:
:paramtype direction: str
"""
super(Relationship, self).__init__(**kwargs)
self.relation_type = kwargs.get('relation_type', None)
self.target_entity_id = kwargs.get('target_entity_id', None)
self.asset_id = kwargs.get('asset_id', None)
self.entity_type = None
self.direction = kwargs.get('direction', None)
self.entity_container_id = None
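# Illustrative sketch (hand-written): entity_type and entity_container_id are
# marked readonly in _validation, so they are populated only by the server and
# ignored if passed to the constructor. The string values are placeholders.
#
#     rel = Relationship(relation_type="Consumes", target_entity_id="abc", direction="Out")
#     assert rel.entity_type is None  # set by the service on responses only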
class RemoteDockerComputeInfo(msrest.serialization.Model):
"""RemoteDockerComputeInfo.
:ivar address:
:vartype address: str
:ivar username:
:vartype username: str
:ivar password:
:vartype password: str
:ivar private_key:
:vartype private_key: str
"""
_attribute_map = {
'address': {'key': 'address', 'type': 'str'},
'username': {'key': 'username', 'type': 'str'},
'password': {'key': 'password', 'type': 'str'},
'private_key': {'key': 'privateKey', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword address:
:paramtype address: str
:keyword username:
:paramtype username: str
:keyword password:
:paramtype password: str
:keyword private_key:
:paramtype private_key: str
"""
super(RemoteDockerComputeInfo, self).__init__(**kwargs)
self.address = kwargs.get('address', None)
self.username = kwargs.get('username', None)
self.password = kwargs.get('password', None)
self.private_key = kwargs.get('private_key', None)
class ResourceConfig(msrest.serialization.Model):
"""ResourceConfig.
:ivar gpu_count:
:vartype gpu_count: int
:ivar cpu_count:
:vartype cpu_count: int
:ivar memory_request_in_gb:
:vartype memory_request_in_gb: int
"""
_attribute_map = {
'gpu_count': {'key': 'gpuCount', 'type': 'int'},
'cpu_count': {'key': 'cpuCount', 'type': 'int'},
'memory_request_in_gb': {'key': 'memoryRequestInGB', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword gpu_count:
:paramtype gpu_count: int
:keyword cpu_count:
:paramtype cpu_count: int
:keyword memory_request_in_gb:
:paramtype memory_request_in_gb: int
"""
super(ResourceConfig, self).__init__(**kwargs)
self.gpu_count = kwargs.get('gpu_count', None)
self.cpu_count = kwargs.get('cpu_count', None)
self.memory_request_in_gb = kwargs.get('memory_request_in_gb', None)
class ResourceConfiguration(msrest.serialization.Model):
"""ResourceConfiguration.
:ivar gpu_count:
:vartype gpu_count: int
:ivar cpu_count:
:vartype cpu_count: int
:ivar memory_request_in_gb:
:vartype memory_request_in_gb: int
"""
_attribute_map = {
'gpu_count': {'key': 'gpuCount', 'type': 'int'},
'cpu_count': {'key': 'cpuCount', 'type': 'int'},
'memory_request_in_gb': {'key': 'memoryRequestInGB', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword gpu_count:
:paramtype gpu_count: int
:keyword cpu_count:
:paramtype cpu_count: int
:keyword memory_request_in_gb:
:paramtype memory_request_in_gb: int
"""
super(ResourceConfiguration, self).__init__(**kwargs)
self.gpu_count = kwargs.get('gpu_count', None)
self.cpu_count = kwargs.get('cpu_count', None)
self.memory_request_in_gb = kwargs.get('memory_request_in_gb', None)
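# Illustrative sketch (hand-written): a small resource request. ResourceConfig
# and ResourceConfiguration are generated with identical shapes; which one a
# given operation expects is determined by that operation's signature.
#
#     resources = ResourceConfiguration(gpu_count=1, cpu_count=4, memory_request_in_gb=16)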
class ResourcesSetting(msrest.serialization.Model):
"""ResourcesSetting.
:ivar instance_size:
:vartype instance_size: str
:ivar spark_version:
:vartype spark_version: str
"""
_attribute_map = {
'instance_size': {'key': 'instanceSize', 'type': 'str'},
'spark_version': {'key': 'sparkVersion', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword instance_size:
:paramtype instance_size: str
:keyword spark_version:
:paramtype spark_version: str
"""
super(ResourcesSetting, self).__init__(**kwargs)
self.instance_size = kwargs.get('instance_size', None)
self.spark_version = kwargs.get('spark_version', None)
class RetrieveToolFuncResultRequest(msrest.serialization.Model):
"""RetrieveToolFuncResultRequest.
:ivar func_path:
:vartype func_path: str
:ivar func_kwargs: This is a dictionary.
:vartype func_kwargs: dict[str, any]
:ivar func_call_scenario: Possible values include: "generated_by", "reverse_generated_by",
"dynamic_list".
:vartype func_call_scenario: str or ~flow.models.ToolFuncCallScenario
"""
_attribute_map = {
'func_path': {'key': 'func_path', 'type': 'str'},
'func_kwargs': {'key': 'func_kwargs', 'type': '{object}'},
'func_call_scenario': {'key': 'func_call_scenario', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword func_path:
:paramtype func_path: str
:keyword func_kwargs: This is a dictionary.
:paramtype func_kwargs: dict[str, any]
:keyword func_call_scenario: Possible values include: "generated_by", "reverse_generated_by",
"dynamic_list".
:paramtype func_call_scenario: str or ~flow.models.ToolFuncCallScenario
"""
super(RetrieveToolFuncResultRequest, self).__init__(**kwargs)
self.func_path = kwargs.get('func_path', None)
self.func_kwargs = kwargs.get('func_kwargs', None)
self.func_call_scenario = kwargs.get('func_call_scenario', None)
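# Illustrative sketch (hand-written): requesting a dynamic_list tool function
# result. func_path and func_kwargs are placeholders; the valid scenario
# strings are those listed in the docstring above.
#
#     request = RetrieveToolFuncResultRequest(
#         func_path="my_package.my_tool.list_options",  # hypothetical function path
#         func_kwargs={"prefix": "abc"},
#         func_call_scenario="dynamic_list",
#     )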
class RetryConfiguration(msrest.serialization.Model):
"""RetryConfiguration.
:ivar max_retry_count:
:vartype max_retry_count: int
"""
_attribute_map = {
'max_retry_count': {'key': 'maxRetryCount', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword max_retry_count:
:paramtype max_retry_count: int
"""
super(RetryConfiguration, self).__init__(**kwargs)
self.max_retry_count = kwargs.get('max_retry_count', None)
class RGitHubPackage(msrest.serialization.Model):
"""RGitHubPackage.
:ivar repository:
:vartype repository: str
:ivar auth_token:
:vartype auth_token: str
"""
_attribute_map = {
'repository': {'key': 'repository', 'type': 'str'},
'auth_token': {'key': 'authToken', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword repository:
:paramtype repository: str
:keyword auth_token:
:paramtype auth_token: str
"""
super(RGitHubPackage, self).__init__(**kwargs)
self.repository = kwargs.get('repository', None)
self.auth_token = kwargs.get('auth_token', None)
class RootError(msrest.serialization.Model):
"""The root error.
:ivar code: The service-defined error code. Supported error codes: ServiceError, UserError,
ValidationError, AzureStorageError, TransientError, RequestThrottled.
:vartype code: str
:ivar severity: The severity of the error.
:vartype severity: int
:ivar message: A human-readable representation of the error.
:vartype message: str
:ivar message_format: An unformatted version of the message with no variable substitution.
:vartype message_format: str
:ivar message_parameters: Value substitutions corresponding to the contents of MessageFormat.
:vartype message_parameters: dict[str, str]
:ivar reference_code: This code can optionally be set by the system generating the error.
 It should be used to classify the problem and identify the module and code area where the
 failure occurred.
:vartype reference_code: str
:ivar details_uri: A URI which points to more details about the context of the error.
:vartype details_uri: str
:ivar target: The target of the error (e.g., the name of the property in error).
:vartype target: str
:ivar details: The related errors that occurred during the request.
:vartype details: list[~flow.models.RootError]
:ivar inner_error: A nested structure of errors.
:vartype inner_error: ~flow.models.InnerErrorResponse
:ivar additional_info: The error additional info.
:vartype additional_info: list[~flow.models.ErrorAdditionalInfo]
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'severity': {'key': 'severity', 'type': 'int'},
'message': {'key': 'message', 'type': 'str'},
'message_format': {'key': 'messageFormat', 'type': 'str'},
'message_parameters': {'key': 'messageParameters', 'type': '{str}'},
'reference_code': {'key': 'referenceCode', 'type': 'str'},
'details_uri': {'key': 'detailsUri', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'details': {'key': 'details', 'type': '[RootError]'},
'inner_error': {'key': 'innerError', 'type': 'InnerErrorResponse'},
'additional_info': {'key': 'additionalInfo', 'type': '[ErrorAdditionalInfo]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword code: The service-defined error code. Supported error codes: ServiceError, UserError,
ValidationError, AzureStorageError, TransientError, RequestThrottled.
:paramtype code: str
:keyword severity: The severity of the error.
:paramtype severity: int
:keyword message: A human-readable representation of the error.
:paramtype message: str
:keyword message_format: An unformatted version of the message with no variable substitution.
:paramtype message_format: str
:keyword message_parameters: Value substitutions corresponding to the contents of
MessageFormat.
:paramtype message_parameters: dict[str, str]
:keyword reference_code: This code can optionally be set by the system generating the error.
 It should be used to classify the problem and identify the module and code area where the
 failure occurred.
:paramtype reference_code: str
:keyword details_uri: A URI which points to more details about the context of the error.
:paramtype details_uri: str
:keyword target: The target of the error (e.g., the name of the property in error).
:paramtype target: str
:keyword details: The related errors that occurred during the request.
:paramtype details: list[~flow.models.RootError]
:keyword inner_error: A nested structure of errors.
:paramtype inner_error: ~flow.models.InnerErrorResponse
:keyword additional_info: The error additional info.
:paramtype additional_info: list[~flow.models.ErrorAdditionalInfo]
"""
super(RootError, self).__init__(**kwargs)
self.code = kwargs.get('code', None)
self.severity = kwargs.get('severity', None)
self.message = kwargs.get('message', None)
self.message_format = kwargs.get('message_format', None)
self.message_parameters = kwargs.get('message_parameters', None)
self.reference_code = kwargs.get('reference_code', None)
self.details_uri = kwargs.get('details_uri', None)
self.target = kwargs.get('target', None)
self.details = kwargs.get('details', None)
self.inner_error = kwargs.get('inner_error', None)
self.additional_info = kwargs.get('additional_info', None)
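# Illustrative sketch (hand-written): a RootError can nest related errors via
# its details list. Codes and messages below are placeholders, though UserError
# and ValidationError are among the documented supported codes.
#
#     error = RootError(
#         code="UserError",
#         message="Validation failed for 2 fields.",
#         details=[
#             RootError(code="ValidationError", message="name is required", target="name"),
#             RootError(code="ValidationError", message="version must be a string", target="version"),
#         ],
#     )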
class RSection(msrest.serialization.Model):
"""RSection.
:ivar r_version:
:vartype r_version: str
:ivar user_managed:
:vartype user_managed: bool
:ivar rscript_path:
:vartype rscript_path: str
:ivar snapshot_date:
:vartype snapshot_date: str
:ivar cran_packages:
:vartype cran_packages: list[~flow.models.RCranPackage]
:ivar git_hub_packages:
:vartype git_hub_packages: list[~flow.models.RGitHubPackage]
:ivar custom_url_packages:
:vartype custom_url_packages: list[str]
:ivar bio_conductor_packages:
:vartype bio_conductor_packages: list[str]
"""
_attribute_map = {
'r_version': {'key': 'rVersion', 'type': 'str'},
'user_managed': {'key': 'userManaged', 'type': 'bool'},
'rscript_path': {'key': 'rscriptPath', 'type': 'str'},
'snapshot_date': {'key': 'snapshotDate', 'type': 'str'},
'cran_packages': {'key': 'cranPackages', 'type': '[RCranPackage]'},
'git_hub_packages': {'key': 'gitHubPackages', 'type': '[RGitHubPackage]'},
'custom_url_packages': {'key': 'customUrlPackages', 'type': '[str]'},
'bio_conductor_packages': {'key': 'bioConductorPackages', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword r_version:
:paramtype r_version: str
:keyword user_managed:
:paramtype user_managed: bool
:keyword rscript_path:
:paramtype rscript_path: str
:keyword snapshot_date:
:paramtype snapshot_date: str
:keyword cran_packages:
:paramtype cran_packages: list[~flow.models.RCranPackage]
:keyword git_hub_packages:
:paramtype git_hub_packages: list[~flow.models.RGitHubPackage]
:keyword custom_url_packages:
:paramtype custom_url_packages: list[str]
:keyword bio_conductor_packages:
:paramtype bio_conductor_packages: list[str]
"""
super(RSection, self).__init__(**kwargs)
self.r_version = kwargs.get('r_version', None)
self.user_managed = kwargs.get('user_managed', None)
self.rscript_path = kwargs.get('rscript_path', None)
self.snapshot_date = kwargs.get('snapshot_date', None)
self.cran_packages = kwargs.get('cran_packages', None)
self.git_hub_packages = kwargs.get('git_hub_packages', None)
self.custom_url_packages = kwargs.get('custom_url_packages', None)
self.bio_conductor_packages = kwargs.get('bio_conductor_packages', None)
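# Illustrative sketch (hand-written): an R environment section pulling one
# GitHub package plus one custom-URL package. The repository and URL values
# are placeholders.
#
#     r_section = RSection(
#         r_version="4.2.0",
#         git_hub_packages=[RGitHubPackage(repository="tidyverse/dplyr")],
#         custom_url_packages=["https://example.invalid/pkg.tar.gz"],
#     )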
class RunAnnotations(msrest.serialization.Model):
"""RunAnnotations.
:ivar display_name:
:vartype display_name: str
:ivar status:
:vartype status: str
:ivar primary_metric_name:
:vartype primary_metric_name: str
:ivar estimated_cost:
:vartype estimated_cost: float
:ivar primary_metric_summary:
:vartype primary_metric_summary: ~flow.models.RunIndexMetricSummary
:ivar metrics: Dictionary of :code:`<RunIndexMetricSummarySystemObject>`.
:vartype metrics: dict[str, ~flow.models.RunIndexMetricSummarySystemObject]
:ivar parameters: Dictionary of :code:`<any>`.
:vartype parameters: dict[str, any]
:ivar settings: Dictionary of :code:`<string>`.
:vartype settings: dict[str, str]
:ivar modified_time:
:vartype modified_time: ~datetime.datetime
:ivar retain_for_lifetime_of_workspace:
:vartype retain_for_lifetime_of_workspace: bool
:ivar error:
:vartype error: ~flow.models.IndexedErrorResponse
:ivar resource_metric_summary:
:vartype resource_metric_summary: ~flow.models.RunIndexResourceMetricSummary
:ivar job_cost:
:vartype job_cost: ~flow.models.JobCost
:ivar compute_duration:
:vartype compute_duration: str
:ivar compute_duration_milliseconds:
:vartype compute_duration_milliseconds: float
:ivar effective_start_time_utc:
:vartype effective_start_time_utc: ~datetime.datetime
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar archived:
:vartype archived: bool
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
"""
_attribute_map = {
'display_name': {'key': 'displayName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'},
'estimated_cost': {'key': 'estimatedCost', 'type': 'float'},
'primary_metric_summary': {'key': 'primaryMetricSummary', 'type': 'RunIndexMetricSummary'},
'metrics': {'key': 'metrics', 'type': '{RunIndexMetricSummarySystemObject}'},
'parameters': {'key': 'parameters', 'type': '{object}'},
'settings': {'key': 'settings', 'type': '{str}'},
'modified_time': {'key': 'modifiedTime', 'type': 'iso-8601'},
'retain_for_lifetime_of_workspace': {'key': 'retainForLifetimeOfWorkspace', 'type': 'bool'},
'error': {'key': 'error', 'type': 'IndexedErrorResponse'},
'resource_metric_summary': {'key': 'resourceMetricSummary', 'type': 'RunIndexResourceMetricSummary'},
'job_cost': {'key': 'jobCost', 'type': 'JobCost'},
'compute_duration': {'key': 'computeDuration', 'type': 'str'},
'compute_duration_milliseconds': {'key': 'computeDurationMilliseconds', 'type': 'float'},
'effective_start_time_utc': {'key': 'effectiveStartTimeUtc', 'type': 'iso-8601'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'archived': {'key': 'archived', 'type': 'bool'},
'tags': {'key': 'tags', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword display_name:
:paramtype display_name: str
:keyword status:
:paramtype status: str
:keyword primary_metric_name:
:paramtype primary_metric_name: str
:keyword estimated_cost:
:paramtype estimated_cost: float
:keyword primary_metric_summary:
:paramtype primary_metric_summary: ~flow.models.RunIndexMetricSummary
:keyword metrics: Dictionary of :code:`<RunIndexMetricSummarySystemObject>`.
:paramtype metrics: dict[str, ~flow.models.RunIndexMetricSummarySystemObject]
:keyword parameters: Dictionary of :code:`<any>`.
:paramtype parameters: dict[str, any]
:keyword settings: Dictionary of :code:`<string>`.
:paramtype settings: dict[str, str]
:keyword modified_time:
:paramtype modified_time: ~datetime.datetime
:keyword retain_for_lifetime_of_workspace:
:paramtype retain_for_lifetime_of_workspace: bool
:keyword error:
:paramtype error: ~flow.models.IndexedErrorResponse
:keyword resource_metric_summary:
:paramtype resource_metric_summary: ~flow.models.RunIndexResourceMetricSummary
:keyword job_cost:
:paramtype job_cost: ~flow.models.JobCost
:keyword compute_duration:
:paramtype compute_duration: str
:keyword compute_duration_milliseconds:
:paramtype compute_duration_milliseconds: float
:keyword effective_start_time_utc:
:paramtype effective_start_time_utc: ~datetime.datetime
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword archived:
:paramtype archived: bool
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
"""
super(RunAnnotations, self).__init__(**kwargs)
self.display_name = kwargs.get('display_name', None)
self.status = kwargs.get('status', None)
self.primary_metric_name = kwargs.get('primary_metric_name', None)
self.estimated_cost = kwargs.get('estimated_cost', None)
self.primary_metric_summary = kwargs.get('primary_metric_summary', None)
self.metrics = kwargs.get('metrics', None)
self.parameters = kwargs.get('parameters', None)
self.settings = kwargs.get('settings', None)
self.modified_time = kwargs.get('modified_time', None)
self.retain_for_lifetime_of_workspace = kwargs.get('retain_for_lifetime_of_workspace', None)
self.error = kwargs.get('error', None)
self.resource_metric_summary = kwargs.get('resource_metric_summary', None)
self.job_cost = kwargs.get('job_cost', None)
self.compute_duration = kwargs.get('compute_duration', None)
self.compute_duration_milliseconds = kwargs.get('compute_duration_milliseconds', None)
self.effective_start_time_utc = kwargs.get('effective_start_time_utc', None)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.archived = kwargs.get('archived', None)
self.tags = kwargs.get('tags', None)
class RunCommandsCommandResult(msrest.serialization.Model):
"""RunCommandsCommandResult.
:ivar command:
:vartype command: str
:ivar arguments:
:vartype arguments: list[str]
:ivar exit_code:
:vartype exit_code: int
:ivar stdout:
:vartype stdout: str
:ivar stderr:
:vartype stderr: str
"""
_attribute_map = {
'command': {'key': 'command', 'type': 'str'},
'arguments': {'key': 'arguments', 'type': '[str]'},
'exit_code': {'key': 'exit_code', 'type': 'int'},
'stdout': {'key': 'stdout', 'type': 'str'},
'stderr': {'key': 'stderr', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword command:
:paramtype command: str
:keyword arguments:
:paramtype arguments: list[str]
:keyword exit_code:
:paramtype exit_code: int
:keyword stdout:
:paramtype stdout: str
:keyword stderr:
:paramtype stderr: str
"""
super(RunCommandsCommandResult, self).__init__(**kwargs)
self.command = kwargs.get('command', None)
self.arguments = kwargs.get('arguments', None)
self.exit_code = kwargs.get('exit_code', None)
self.stdout = kwargs.get('stdout', None)
self.stderr = kwargs.get('stderr', None)


class RunConfiguration(msrest.serialization.Model):
"""RunConfiguration.
:ivar script:
:vartype script: str
:ivar script_type: Possible values include: "Python", "Notebook".
:vartype script_type: str or ~flow.models.ScriptType
:ivar command:
:vartype command: str
:ivar use_absolute_path:
:vartype use_absolute_path: bool
:ivar arguments:
:vartype arguments: list[str]
:ivar framework: Possible values include: "Python", "PySpark", "Cntk", "TensorFlow", "PyTorch",
"PySparkInteractive", "R".
:vartype framework: str or ~flow.models.Framework
:ivar communicator: Possible values include: "None", "ParameterServer", "Gloo", "Mpi", "Nccl",
"ParallelTask".
:vartype communicator: str or ~flow.models.Communicator
:ivar target:
:vartype target: str
:ivar auto_cluster_compute_specification:
:vartype auto_cluster_compute_specification: ~flow.models.AutoClusterComputeSpecification
:ivar data_references: Dictionary of :code:`<DataReferenceConfiguration>`.
:vartype data_references: dict[str, ~flow.models.DataReferenceConfiguration]
:ivar data: Dictionary of :code:`<Data>`.
:vartype data: dict[str, ~flow.models.Data]
:ivar input_assets: Dictionary of :code:`<InputAsset>`.
:vartype input_assets: dict[str, ~flow.models.InputAsset]
:ivar output_data: Dictionary of :code:`<OutputData>`.
:vartype output_data: dict[str, ~flow.models.OutputData]
:ivar datacaches:
:vartype datacaches: list[~flow.models.DatacacheConfiguration]
:ivar job_name:
:vartype job_name: str
:ivar max_run_duration_seconds:
:vartype max_run_duration_seconds: long
:ivar node_count:
:vartype node_count: int
:ivar max_node_count:
:vartype max_node_count: int
:ivar instance_types:
:vartype instance_types: list[str]
:ivar priority:
:vartype priority: int
:ivar credential_passthrough:
:vartype credential_passthrough: bool
:ivar identity:
:vartype identity: ~flow.models.IdentityConfiguration
:ivar environment:
:vartype environment: ~flow.models.EnvironmentDefinition
:ivar history:
:vartype history: ~flow.models.HistoryConfiguration
:ivar spark:
:vartype spark: ~flow.models.SparkConfiguration
:ivar parallel_task:
:vartype parallel_task: ~flow.models.ParallelTaskConfiguration
:ivar tensorflow:
:vartype tensorflow: ~flow.models.TensorflowConfiguration
:ivar mpi:
:vartype mpi: ~flow.models.MpiConfiguration
:ivar py_torch:
:vartype py_torch: ~flow.models.PyTorchConfiguration
:ivar ray:
:vartype ray: ~flow.models.RayConfiguration
:ivar hdi:
:vartype hdi: ~flow.models.HdiConfiguration
:ivar docker:
:vartype docker: ~flow.models.DockerConfiguration
:ivar command_return_code_config:
:vartype command_return_code_config: ~flow.models.CommandReturnCodeConfig
:ivar environment_variables: Dictionary of :code:`<string>`.
:vartype environment_variables: dict[str, str]
:ivar application_endpoints: Dictionary of :code:`<ApplicationEndpointConfiguration>`.
:vartype application_endpoints: dict[str, ~flow.models.ApplicationEndpointConfiguration]
:ivar parameters:
:vartype parameters: list[~flow.models.ParameterDefinition]
:ivar autologger_settings:
:vartype autologger_settings: ~flow.models.AutologgerSettings
:ivar data_bricks:
:vartype data_bricks: ~flow.models.DatabricksConfiguration
:ivar training_diagnostic_config:
:vartype training_diagnostic_config: ~flow.models.TrainingDiagnosticConfiguration
:ivar secrets_configuration: Dictionary of :code:`<SecretConfiguration>`.
:vartype secrets_configuration: dict[str, ~flow.models.SecretConfiguration]
"""
_attribute_map = {
'script': {'key': 'script', 'type': 'str'},
'script_type': {'key': 'scriptType', 'type': 'str'},
'command': {'key': 'command', 'type': 'str'},
'use_absolute_path': {'key': 'useAbsolutePath', 'type': 'bool'},
'arguments': {'key': 'arguments', 'type': '[str]'},
'framework': {'key': 'framework', 'type': 'str'},
'communicator': {'key': 'communicator', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'auto_cluster_compute_specification': {'key': 'autoClusterComputeSpecification', 'type': 'AutoClusterComputeSpecification'},
'data_references': {'key': 'dataReferences', 'type': '{DataReferenceConfiguration}'},
'data': {'key': 'data', 'type': '{Data}'},
'input_assets': {'key': 'inputAssets', 'type': '{InputAsset}'},
'output_data': {'key': 'outputData', 'type': '{OutputData}'},
'datacaches': {'key': 'datacaches', 'type': '[DatacacheConfiguration]'},
'job_name': {'key': 'jobName', 'type': 'str'},
'max_run_duration_seconds': {'key': 'maxRunDurationSeconds', 'type': 'long'},
'node_count': {'key': 'nodeCount', 'type': 'int'},
'max_node_count': {'key': 'maxNodeCount', 'type': 'int'},
'instance_types': {'key': 'instanceTypes', 'type': '[str]'},
'priority': {'key': 'priority', 'type': 'int'},
'credential_passthrough': {'key': 'credentialPassthrough', 'type': 'bool'},
'identity': {'key': 'identity', 'type': 'IdentityConfiguration'},
'environment': {'key': 'environment', 'type': 'EnvironmentDefinition'},
'history': {'key': 'history', 'type': 'HistoryConfiguration'},
'spark': {'key': 'spark', 'type': 'SparkConfiguration'},
'parallel_task': {'key': 'parallelTask', 'type': 'ParallelTaskConfiguration'},
'tensorflow': {'key': 'tensorflow', 'type': 'TensorflowConfiguration'},
'mpi': {'key': 'mpi', 'type': 'MpiConfiguration'},
'py_torch': {'key': 'pyTorch', 'type': 'PyTorchConfiguration'},
'ray': {'key': 'ray', 'type': 'RayConfiguration'},
'hdi': {'key': 'hdi', 'type': 'HdiConfiguration'},
'docker': {'key': 'docker', 'type': 'DockerConfiguration'},
'command_return_code_config': {'key': 'commandReturnCodeConfig', 'type': 'CommandReturnCodeConfig'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'application_endpoints': {'key': 'applicationEndpoints', 'type': '{ApplicationEndpointConfiguration}'},
'parameters': {'key': 'parameters', 'type': '[ParameterDefinition]'},
'autologger_settings': {'key': 'autologgerSettings', 'type': 'AutologgerSettings'},
'data_bricks': {'key': 'dataBricks', 'type': 'DatabricksConfiguration'},
'training_diagnostic_config': {'key': 'trainingDiagnosticConfig', 'type': 'TrainingDiagnosticConfiguration'},
'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{SecretConfiguration}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword script:
:paramtype script: str
:keyword script_type: Possible values include: "Python", "Notebook".
:paramtype script_type: str or ~flow.models.ScriptType
:keyword command:
:paramtype command: str
:keyword use_absolute_path:
:paramtype use_absolute_path: bool
:keyword arguments:
:paramtype arguments: list[str]
:keyword framework: Possible values include: "Python", "PySpark", "Cntk", "TensorFlow",
"PyTorch", "PySparkInteractive", "R".
:paramtype framework: str or ~flow.models.Framework
:keyword communicator: Possible values include: "None", "ParameterServer", "Gloo", "Mpi",
"Nccl", "ParallelTask".
:paramtype communicator: str or ~flow.models.Communicator
:keyword target:
:paramtype target: str
:keyword auto_cluster_compute_specification:
:paramtype auto_cluster_compute_specification: ~flow.models.AutoClusterComputeSpecification
:keyword data_references: Dictionary of :code:`<DataReferenceConfiguration>`.
:paramtype data_references: dict[str, ~flow.models.DataReferenceConfiguration]
:keyword data: Dictionary of :code:`<Data>`.
:paramtype data: dict[str, ~flow.models.Data]
:keyword input_assets: Dictionary of :code:`<InputAsset>`.
:paramtype input_assets: dict[str, ~flow.models.InputAsset]
:keyword output_data: Dictionary of :code:`<OutputData>`.
:paramtype output_data: dict[str, ~flow.models.OutputData]
:keyword datacaches:
:paramtype datacaches: list[~flow.models.DatacacheConfiguration]
:keyword job_name:
:paramtype job_name: str
:keyword max_run_duration_seconds:
:paramtype max_run_duration_seconds: long
:keyword node_count:
:paramtype node_count: int
:keyword max_node_count:
:paramtype max_node_count: int
:keyword instance_types:
:paramtype instance_types: list[str]
:keyword priority:
:paramtype priority: int
:keyword credential_passthrough:
:paramtype credential_passthrough: bool
:keyword identity:
:paramtype identity: ~flow.models.IdentityConfiguration
:keyword environment:
:paramtype environment: ~flow.models.EnvironmentDefinition
:keyword history:
:paramtype history: ~flow.models.HistoryConfiguration
:keyword spark:
:paramtype spark: ~flow.models.SparkConfiguration
:keyword parallel_task:
:paramtype parallel_task: ~flow.models.ParallelTaskConfiguration
:keyword tensorflow:
:paramtype tensorflow: ~flow.models.TensorflowConfiguration
:keyword mpi:
:paramtype mpi: ~flow.models.MpiConfiguration
:keyword py_torch:
:paramtype py_torch: ~flow.models.PyTorchConfiguration
:keyword ray:
:paramtype ray: ~flow.models.RayConfiguration
:keyword hdi:
:paramtype hdi: ~flow.models.HdiConfiguration
:keyword docker:
:paramtype docker: ~flow.models.DockerConfiguration
:keyword command_return_code_config:
:paramtype command_return_code_config: ~flow.models.CommandReturnCodeConfig
:keyword environment_variables: Dictionary of :code:`<string>`.
:paramtype environment_variables: dict[str, str]
:keyword application_endpoints: Dictionary of :code:`<ApplicationEndpointConfiguration>`.
:paramtype application_endpoints: dict[str, ~flow.models.ApplicationEndpointConfiguration]
:keyword parameters:
:paramtype parameters: list[~flow.models.ParameterDefinition]
:keyword autologger_settings:
:paramtype autologger_settings: ~flow.models.AutologgerSettings
:keyword data_bricks:
:paramtype data_bricks: ~flow.models.DatabricksConfiguration
:keyword training_diagnostic_config:
:paramtype training_diagnostic_config: ~flow.models.TrainingDiagnosticConfiguration
:keyword secrets_configuration: Dictionary of :code:`<SecretConfiguration>`.
:paramtype secrets_configuration: dict[str, ~flow.models.SecretConfiguration]
"""
super(RunConfiguration, self).__init__(**kwargs)
self.script = kwargs.get('script', None)
self.script_type = kwargs.get('script_type', None)
self.command = kwargs.get('command', None)
self.use_absolute_path = kwargs.get('use_absolute_path', None)
self.arguments = kwargs.get('arguments', None)
self.framework = kwargs.get('framework', None)
self.communicator = kwargs.get('communicator', None)
self.target = kwargs.get('target', None)
self.auto_cluster_compute_specification = kwargs.get('auto_cluster_compute_specification', None)
self.data_references = kwargs.get('data_references', None)
self.data = kwargs.get('data', None)
self.input_assets = kwargs.get('input_assets', None)
self.output_data = kwargs.get('output_data', None)
self.datacaches = kwargs.get('datacaches', None)
self.job_name = kwargs.get('job_name', None)
self.max_run_duration_seconds = kwargs.get('max_run_duration_seconds', None)
self.node_count = kwargs.get('node_count', None)
self.max_node_count = kwargs.get('max_node_count', None)
self.instance_types = kwargs.get('instance_types', None)
self.priority = kwargs.get('priority', None)
self.credential_passthrough = kwargs.get('credential_passthrough', None)
self.identity = kwargs.get('identity', None)
self.environment = kwargs.get('environment', None)
self.history = kwargs.get('history', None)
self.spark = kwargs.get('spark', None)
self.parallel_task = kwargs.get('parallel_task', None)
self.tensorflow = kwargs.get('tensorflow', None)
self.mpi = kwargs.get('mpi', None)
self.py_torch = kwargs.get('py_torch', None)
self.ray = kwargs.get('ray', None)
self.hdi = kwargs.get('hdi', None)
self.docker = kwargs.get('docker', None)
self.command_return_code_config = kwargs.get('command_return_code_config', None)
self.environment_variables = kwargs.get('environment_variables', None)
self.application_endpoints = kwargs.get('application_endpoints', None)
self.parameters = kwargs.get('parameters', None)
self.autologger_settings = kwargs.get('autologger_settings', None)
self.data_bricks = kwargs.get('data_bricks', None)
self.training_diagnostic_config = kwargs.get('training_diagnostic_config', None)
self.secrets_configuration = kwargs.get('secrets_configuration', None)
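
# Example (illustrative only): a small script-style configuration using a
# handful of the documented keywords. 'cpu-cluster' and the environment
# variable are hypothetical values; 'Python' is one of the documented
# script_type values.
#
#     run_config = RunConfiguration(
#         script='train.py',
#         script_type='Python',
#         arguments=['--epochs', '10'],
#         target='cpu-cluster',
#         node_count=1,
#         max_run_duration_seconds=3600,
#         environment_variables={'LOG_LEVEL': 'INFO'},
#     )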


class RunDatasetReference(msrest.serialization.Model):
"""RunDatasetReference.
:ivar id:
:vartype id: str
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
"""
super(RunDatasetReference, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.name = kwargs.get('name', None)
self.version = kwargs.get('version', None)


class RunDefinition(msrest.serialization.Model):
"""RunDefinition.
:ivar configuration:
:vartype configuration: ~flow.models.RunConfiguration
:ivar snapshot_id:
:vartype snapshot_id: str
:ivar snapshots:
:vartype snapshots: list[~flow.models.Snapshot]
:ivar parent_run_id:
:vartype parent_run_id: str
:ivar run_type:
:vartype run_type: str
:ivar display_name:
:vartype display_name: str
:ivar environment_asset_id:
:vartype environment_asset_id: str
:ivar primary_metric_name:
:vartype primary_metric_name: str
:ivar description:
:vartype description: str
:ivar cancel_reason:
:vartype cancel_reason: str
:ivar properties: Dictionary of :code:`<string>`.
:vartype properties: dict[str, str]
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
"""
_attribute_map = {
'configuration': {'key': 'configuration', 'type': 'RunConfiguration'},
'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
'snapshots': {'key': 'snapshots', 'type': '[Snapshot]'},
'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
'run_type': {'key': 'runType', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'environment_asset_id': {'key': 'environmentAssetId', 'type': 'str'},
'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'cancel_reason': {'key': 'cancelReason', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'tags': {'key': 'tags', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword configuration:
:paramtype configuration: ~flow.models.RunConfiguration
:keyword snapshot_id:
:paramtype snapshot_id: str
:keyword snapshots:
:paramtype snapshots: list[~flow.models.Snapshot]
:keyword parent_run_id:
:paramtype parent_run_id: str
:keyword run_type:
:paramtype run_type: str
:keyword display_name:
:paramtype display_name: str
:keyword environment_asset_id:
:paramtype environment_asset_id: str
:keyword primary_metric_name:
:paramtype primary_metric_name: str
:keyword description:
:paramtype description: str
:keyword cancel_reason:
:paramtype cancel_reason: str
:keyword properties: Dictionary of :code:`<string>`.
:paramtype properties: dict[str, str]
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
"""
super(RunDefinition, self).__init__(**kwargs)
self.configuration = kwargs.get('configuration', None)
self.snapshot_id = kwargs.get('snapshot_id', None)
self.snapshots = kwargs.get('snapshots', None)
self.parent_run_id = kwargs.get('parent_run_id', None)
self.run_type = kwargs.get('run_type', None)
self.display_name = kwargs.get('display_name', None)
self.environment_asset_id = kwargs.get('environment_asset_id', None)
self.primary_metric_name = kwargs.get('primary_metric_name', None)
self.description = kwargs.get('description', None)
self.cancel_reason = kwargs.get('cancel_reason', None)
self.properties = kwargs.get('properties', None)
self.tags = kwargs.get('tags', None)
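
# Example (illustrative only): wrapping a RunConfiguration (defined above) in
# a RunDefinition; the run_type string and other identifiers are hypothetical.
#
#     definition = RunDefinition(
#         configuration=RunConfiguration(script='train.py', target='cpu-cluster'),
#         run_type='script',
#         display_name='nightly-training',
#         tags={'team': 'ml-platform'},
#     )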


class RunDetailsDto(msrest.serialization.Model):
"""RunDetailsDto.
:ivar run_id:
:vartype run_id: str
:ivar run_uuid:
:vartype run_uuid: str
:ivar parent_run_uuid:
:vartype parent_run_uuid: str
:ivar root_run_uuid:
:vartype root_run_uuid: str
:ivar target:
:vartype target: str
:ivar status:
:vartype status: str
:ivar parent_run_id:
:vartype parent_run_id: str
:ivar data_container_id:
:vartype data_container_id: str
:ivar created_time_utc:
:vartype created_time_utc: ~datetime.datetime
:ivar start_time_utc:
:vartype start_time_utc: ~datetime.datetime
:ivar end_time_utc:
:vartype end_time_utc: ~datetime.datetime
:ivar error: The error response.
:vartype error: ~flow.models.ErrorResponse
:ivar warnings:
:vartype warnings: list[~flow.models.RunDetailsWarningDto]
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar parameters: Dictionary of :code:`<any>`.
:vartype parameters: dict[str, any]
:ivar services: This is a dictionary.
:vartype services: dict[str, ~flow.models.EndpointSetting]
:ivar input_datasets:
:vartype input_datasets: list[~flow.models.DatasetLineage]
:ivar output_datasets:
:vartype output_datasets: list[~flow.models.OutputDatasetLineage]
:ivar run_definition: Anything.
:vartype run_definition: any
:ivar log_files: This is a dictionary.
:vartype log_files: dict[str, str]
:ivar job_cost:
:vartype job_cost: ~flow.models.JobCost
:ivar revision:
:vartype revision: long
:ivar run_type_v2:
:vartype run_type_v2: ~flow.models.RunTypeV2
:ivar settings: This is a dictionary.
:vartype settings: dict[str, str]
:ivar compute_request:
:vartype compute_request: ~flow.models.ComputeRequest
:ivar compute:
:vartype compute: ~flow.models.Compute
:ivar created_by:
:vartype created_by: ~flow.models.User
:ivar compute_duration:
:vartype compute_duration: str
:ivar effective_start_time_utc:
:vartype effective_start_time_utc: ~datetime.datetime
:ivar run_number:
:vartype run_number: int
:ivar root_run_id:
:vartype root_run_id: str
:ivar experiment_id:
:vartype experiment_id: str
:ivar user_id:
:vartype user_id: str
:ivar status_revision:
:vartype status_revision: long
:ivar current_compute_time:
:vartype current_compute_time: str
:ivar last_start_time_utc:
:vartype last_start_time_utc: ~datetime.datetime
:ivar last_modified_by:
:vartype last_modified_by: ~flow.models.User
:ivar last_modified_utc:
:vartype last_modified_utc: ~datetime.datetime
:ivar duration:
:vartype duration: str
:ivar inputs: Dictionary of :code:`<TypedAssetReference>`.
:vartype inputs: dict[str, ~flow.models.TypedAssetReference]
:ivar outputs: Dictionary of :code:`<TypedAssetReference>`.
:vartype outputs: dict[str, ~flow.models.TypedAssetReference]
:ivar current_attempt_id:
:vartype current_attempt_id: int
"""
_validation = {
'input_datasets': {'unique': True},
'output_datasets': {'unique': True},
}
_attribute_map = {
'run_id': {'key': 'runId', 'type': 'str'},
'run_uuid': {'key': 'runUuid', 'type': 'str'},
'parent_run_uuid': {'key': 'parentRunUuid', 'type': 'str'},
'root_run_uuid': {'key': 'rootRunUuid', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
'created_time_utc': {'key': 'createdTimeUtc', 'type': 'iso-8601'},
'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'},
'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'},
'error': {'key': 'error', 'type': 'ErrorResponse'},
'warnings': {'key': 'warnings', 'type': '[RunDetailsWarningDto]'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'parameters': {'key': 'parameters', 'type': '{object}'},
'services': {'key': 'services', 'type': '{EndpointSetting}'},
'input_datasets': {'key': 'inputDatasets', 'type': '[DatasetLineage]'},
'output_datasets': {'key': 'outputDatasets', 'type': '[OutputDatasetLineage]'},
'run_definition': {'key': 'runDefinition', 'type': 'object'},
'log_files': {'key': 'logFiles', 'type': '{str}'},
'job_cost': {'key': 'jobCost', 'type': 'JobCost'},
'revision': {'key': 'revision', 'type': 'long'},
'run_type_v2': {'key': 'runTypeV2', 'type': 'RunTypeV2'},
'settings': {'key': 'settings', 'type': '{str}'},
'compute_request': {'key': 'computeRequest', 'type': 'ComputeRequest'},
'compute': {'key': 'compute', 'type': 'Compute'},
'created_by': {'key': 'createdBy', 'type': 'User'},
'compute_duration': {'key': 'computeDuration', 'type': 'str'},
'effective_start_time_utc': {'key': 'effectiveStartTimeUtc', 'type': 'iso-8601'},
'run_number': {'key': 'runNumber', 'type': 'int'},
'root_run_id': {'key': 'rootRunId', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'user_id': {'key': 'userId', 'type': 'str'},
'status_revision': {'key': 'statusRevision', 'type': 'long'},
'current_compute_time': {'key': 'currentComputeTime', 'type': 'str'},
'last_start_time_utc': {'key': 'lastStartTimeUtc', 'type': 'iso-8601'},
'last_modified_by': {'key': 'lastModifiedBy', 'type': 'User'},
'last_modified_utc': {'key': 'lastModifiedUtc', 'type': 'iso-8601'},
'duration': {'key': 'duration', 'type': 'str'},
'inputs': {'key': 'inputs', 'type': '{TypedAssetReference}'},
'outputs': {'key': 'outputs', 'type': '{TypedAssetReference}'},
'current_attempt_id': {'key': 'currentAttemptId', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword run_id:
:paramtype run_id: str
:keyword run_uuid:
:paramtype run_uuid: str
:keyword parent_run_uuid:
:paramtype parent_run_uuid: str
:keyword root_run_uuid:
:paramtype root_run_uuid: str
:keyword target:
:paramtype target: str
:keyword status:
:paramtype status: str
:keyword parent_run_id:
:paramtype parent_run_id: str
:keyword data_container_id:
:paramtype data_container_id: str
:keyword created_time_utc:
:paramtype created_time_utc: ~datetime.datetime
:keyword start_time_utc:
:paramtype start_time_utc: ~datetime.datetime
:keyword end_time_utc:
:paramtype end_time_utc: ~datetime.datetime
:keyword error: The error response.
:paramtype error: ~flow.models.ErrorResponse
:keyword warnings:
:paramtype warnings: list[~flow.models.RunDetailsWarningDto]
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword parameters: Dictionary of :code:`<any>`.
:paramtype parameters: dict[str, any]
:keyword services: This is a dictionary.
:paramtype services: dict[str, ~flow.models.EndpointSetting]
:keyword input_datasets:
:paramtype input_datasets: list[~flow.models.DatasetLineage]
:keyword output_datasets:
:paramtype output_datasets: list[~flow.models.OutputDatasetLineage]
:keyword run_definition: Anything.
:paramtype run_definition: any
:keyword log_files: This is a dictionary.
:paramtype log_files: dict[str, str]
:keyword job_cost:
:paramtype job_cost: ~flow.models.JobCost
:keyword revision:
:paramtype revision: long
:keyword run_type_v2:
:paramtype run_type_v2: ~flow.models.RunTypeV2
:keyword settings: This is a dictionary.
:paramtype settings: dict[str, str]
:keyword compute_request:
:paramtype compute_request: ~flow.models.ComputeRequest
:keyword compute:
:paramtype compute: ~flow.models.Compute
:keyword created_by:
:paramtype created_by: ~flow.models.User
:keyword compute_duration:
:paramtype compute_duration: str
:keyword effective_start_time_utc:
:paramtype effective_start_time_utc: ~datetime.datetime
:keyword run_number:
:paramtype run_number: int
:keyword root_run_id:
:paramtype root_run_id: str
:keyword experiment_id:
:paramtype experiment_id: str
:keyword user_id:
:paramtype user_id: str
:keyword status_revision:
:paramtype status_revision: long
:keyword current_compute_time:
:paramtype current_compute_time: str
:keyword last_start_time_utc:
:paramtype last_start_time_utc: ~datetime.datetime
:keyword last_modified_by:
:paramtype last_modified_by: ~flow.models.User
:keyword last_modified_utc:
:paramtype last_modified_utc: ~datetime.datetime
:keyword duration:
:paramtype duration: str
:keyword inputs: Dictionary of :code:`<TypedAssetReference>`.
:paramtype inputs: dict[str, ~flow.models.TypedAssetReference]
:keyword outputs: Dictionary of :code:`<TypedAssetReference>`.
:paramtype outputs: dict[str, ~flow.models.TypedAssetReference]
:keyword current_attempt_id:
:paramtype current_attempt_id: int
"""
super(RunDetailsDto, self).__init__(**kwargs)
self.run_id = kwargs.get('run_id', None)
self.run_uuid = kwargs.get('run_uuid', None)
self.parent_run_uuid = kwargs.get('parent_run_uuid', None)
self.root_run_uuid = kwargs.get('root_run_uuid', None)
self.target = kwargs.get('target', None)
self.status = kwargs.get('status', None)
self.parent_run_id = kwargs.get('parent_run_id', None)
self.data_container_id = kwargs.get('data_container_id', None)
self.created_time_utc = kwargs.get('created_time_utc', None)
self.start_time_utc = kwargs.get('start_time_utc', None)
self.end_time_utc = kwargs.get('end_time_utc', None)
self.error = kwargs.get('error', None)
self.warnings = kwargs.get('warnings', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
self.parameters = kwargs.get('parameters', None)
self.services = kwargs.get('services', None)
self.input_datasets = kwargs.get('input_datasets', None)
self.output_datasets = kwargs.get('output_datasets', None)
self.run_definition = kwargs.get('run_definition', None)
self.log_files = kwargs.get('log_files', None)
self.job_cost = kwargs.get('job_cost', None)
self.revision = kwargs.get('revision', None)
self.run_type_v2 = kwargs.get('run_type_v2', None)
self.settings = kwargs.get('settings', None)
self.compute_request = kwargs.get('compute_request', None)
self.compute = kwargs.get('compute', None)
self.created_by = kwargs.get('created_by', None)
self.compute_duration = kwargs.get('compute_duration', None)
self.effective_start_time_utc = kwargs.get('effective_start_time_utc', None)
self.run_number = kwargs.get('run_number', None)
self.root_run_id = kwargs.get('root_run_id', None)
self.experiment_id = kwargs.get('experiment_id', None)
self.user_id = kwargs.get('user_id', None)
self.status_revision = kwargs.get('status_revision', None)
self.current_compute_time = kwargs.get('current_compute_time', None)
self.last_start_time_utc = kwargs.get('last_start_time_utc', None)
self.last_modified_by = kwargs.get('last_modified_by', None)
self.last_modified_utc = kwargs.get('last_modified_utc', None)
self.duration = kwargs.get('duration', None)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
self.current_attempt_id = kwargs.get('current_attempt_id', None)
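
# Example (illustrative only): models in this module inherit from
# msrest.serialization.Model, so a service payload (camelCase keys and
# ISO-8601 timestamps, per the _attribute_map above) should deserialize
# directly. The payload values are hypothetical.
#
#     payload = {
#         'runId': 'run-001',
#         'status': 'Completed',
#         'createdTimeUtc': '2023-01-01T00:00:00.000Z',
#         'tags': {'team': 'ml-platform'},
#     }
#     details = RunDetailsDto.deserialize(payload)
#     assert details.run_id == 'run-001'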


class RunDetailsWarningDto(msrest.serialization.Model):
"""RunDetailsWarningDto.
:ivar source:
:vartype source: str
:ivar message:
:vartype message: str
"""
_attribute_map = {
'source': {'key': 'source', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword source:
:paramtype source: str
:keyword message:
:paramtype message: str
"""
super(RunDetailsWarningDto, self).__init__(**kwargs)
self.source = kwargs.get('source', None)
self.message = kwargs.get('message', None)


class RunDto(msrest.serialization.Model):
"""RunDto.
:ivar run_number:
:vartype run_number: int
:ivar root_run_id:
:vartype root_run_id: str
:ivar created_utc:
:vartype created_utc: ~datetime.datetime
:ivar created_by:
:vartype created_by: ~flow.models.User
:ivar user_id:
:vartype user_id: str
:ivar token:
:vartype token: str
:ivar token_expiry_time_utc:
:vartype token_expiry_time_utc: ~datetime.datetime
:ivar error: The error response.
:vartype error: ~flow.models.ErrorResponse
:ivar warnings:
:vartype warnings: list[~flow.models.RunDetailsWarningDto]
:ivar revision:
:vartype revision: long
:ivar status_revision:
:vartype status_revision: long
:ivar run_uuid:
:vartype run_uuid: str
:ivar parent_run_uuid:
:vartype parent_run_uuid: str
:ivar root_run_uuid:
:vartype root_run_uuid: str
:ivar last_start_time_utc:
:vartype last_start_time_utc: ~datetime.datetime
:ivar current_compute_time:
:vartype current_compute_time: str
:ivar compute_duration:
:vartype compute_duration: str
:ivar effective_start_time_utc:
:vartype effective_start_time_utc: ~datetime.datetime
:ivar last_modified_by:
:vartype last_modified_by: ~flow.models.User
:ivar last_modified_utc:
:vartype last_modified_utc: ~datetime.datetime
:ivar duration:
:vartype duration: str
:ivar cancelation_reason:
:vartype cancelation_reason: str
:ivar current_attempt_id:
:vartype current_attempt_id: int
:ivar run_id:
:vartype run_id: str
:ivar parent_run_id:
:vartype parent_run_id: str
:ivar experiment_id:
:vartype experiment_id: str
:ivar status:
:vartype status: str
:ivar start_time_utc:
:vartype start_time_utc: ~datetime.datetime
:ivar end_time_utc:
:vartype end_time_utc: ~datetime.datetime
:ivar schedule_id:
:vartype schedule_id: str
:ivar display_name:
:vartype display_name: str
:ivar name:
:vartype name: str
:ivar data_container_id:
:vartype data_container_id: str
:ivar description:
:vartype description: str
:ivar hidden:
:vartype hidden: bool
:ivar run_type:
:vartype run_type: str
:ivar run_type_v2:
:vartype run_type_v2: ~flow.models.RunTypeV2
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar parameters: Dictionary of :code:`<any>`.
:vartype parameters: dict[str, any]
:ivar action_uris: Dictionary of :code:`<string>`.
:vartype action_uris: dict[str, str]
:ivar script_name:
:vartype script_name: str
:ivar target:
:vartype target: str
:ivar unique_child_run_compute_targets:
:vartype unique_child_run_compute_targets: list[str]
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar settings: Dictionary of :code:`<string>`.
:vartype settings: dict[str, str]
:ivar services: Dictionary of :code:`<EndpointSetting>`.
:vartype services: dict[str, ~flow.models.EndpointSetting]
:ivar input_datasets:
:vartype input_datasets: list[~flow.models.DatasetLineage]
:ivar output_datasets:
:vartype output_datasets: list[~flow.models.OutputDatasetLineage]
:ivar run_definition: Anything.
:vartype run_definition: any
:ivar job_specification: Anything.
:vartype job_specification: any
:ivar primary_metric_name:
:vartype primary_metric_name: str
:ivar created_from:
:vartype created_from: ~flow.models.CreatedFromDto
:ivar cancel_uri:
:vartype cancel_uri: str
:ivar complete_uri:
:vartype complete_uri: str
:ivar diagnostics_uri:
:vartype diagnostics_uri: str
:ivar compute_request:
:vartype compute_request: ~flow.models.ComputeRequest
:ivar compute:
:vartype compute: ~flow.models.Compute
:ivar retain_for_lifetime_of_workspace:
:vartype retain_for_lifetime_of_workspace: bool
:ivar queueing_info:
:vartype queueing_info: ~flow.models.QueueingInfo
:ivar inputs: Dictionary of :code:`<TypedAssetReference>`.
:vartype inputs: dict[str, ~flow.models.TypedAssetReference]
:ivar outputs: Dictionary of :code:`<TypedAssetReference>`.
:vartype outputs: dict[str, ~flow.models.TypedAssetReference]
"""
_validation = {
'unique_child_run_compute_targets': {'unique': True},
'input_datasets': {'unique': True},
'output_datasets': {'unique': True},
}
_attribute_map = {
'run_number': {'key': 'runNumber', 'type': 'int'},
'root_run_id': {'key': 'rootRunId', 'type': 'str'},
'created_utc': {'key': 'createdUtc', 'type': 'iso-8601'},
'created_by': {'key': 'createdBy', 'type': 'User'},
'user_id': {'key': 'userId', 'type': 'str'},
'token': {'key': 'token', 'type': 'str'},
'token_expiry_time_utc': {'key': 'tokenExpiryTimeUtc', 'type': 'iso-8601'},
'error': {'key': 'error', 'type': 'ErrorResponse'},
'warnings': {'key': 'warnings', 'type': '[RunDetailsWarningDto]'},
'revision': {'key': 'revision', 'type': 'long'},
'status_revision': {'key': 'statusRevision', 'type': 'long'},
'run_uuid': {'key': 'runUuid', 'type': 'str'},
'parent_run_uuid': {'key': 'parentRunUuid', 'type': 'str'},
'root_run_uuid': {'key': 'rootRunUuid', 'type': 'str'},
'last_start_time_utc': {'key': 'lastStartTimeUtc', 'type': 'iso-8601'},
'current_compute_time': {'key': 'currentComputeTime', 'type': 'str'},
'compute_duration': {'key': 'computeDuration', 'type': 'str'},
'effective_start_time_utc': {'key': 'effectiveStartTimeUtc', 'type': 'iso-8601'},
'last_modified_by': {'key': 'lastModifiedBy', 'type': 'User'},
'last_modified_utc': {'key': 'lastModifiedUtc', 'type': 'iso-8601'},
'duration': {'key': 'duration', 'type': 'str'},
'cancelation_reason': {'key': 'cancelationReason', 'type': 'str'},
'current_attempt_id': {'key': 'currentAttemptId', 'type': 'int'},
'run_id': {'key': 'runId', 'type': 'str'},
'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'},
'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'},
'schedule_id': {'key': 'scheduleId', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'hidden': {'key': 'hidden', 'type': 'bool'},
'run_type': {'key': 'runType', 'type': 'str'},
'run_type_v2': {'key': 'runTypeV2', 'type': 'RunTypeV2'},
'properties': {'key': 'properties', 'type': '{str}'},
'parameters': {'key': 'parameters', 'type': '{object}'},
'action_uris': {'key': 'actionUris', 'type': '{str}'},
'script_name': {'key': 'scriptName', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'unique_child_run_compute_targets': {'key': 'uniqueChildRunComputeTargets', 'type': '[str]'},
'tags': {'key': 'tags', 'type': '{str}'},
'settings': {'key': 'settings', 'type': '{str}'},
'services': {'key': 'services', 'type': '{EndpointSetting}'},
'input_datasets': {'key': 'inputDatasets', 'type': '[DatasetLineage]'},
'output_datasets': {'key': 'outputDatasets', 'type': '[OutputDatasetLineage]'},
'run_definition': {'key': 'runDefinition', 'type': 'object'},
'job_specification': {'key': 'jobSpecification', 'type': 'object'},
'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'},
'created_from': {'key': 'createdFrom', 'type': 'CreatedFromDto'},
'cancel_uri': {'key': 'cancelUri', 'type': 'str'},
'complete_uri': {'key': 'completeUri', 'type': 'str'},
'diagnostics_uri': {'key': 'diagnosticsUri', 'type': 'str'},
'compute_request': {'key': 'computeRequest', 'type': 'ComputeRequest'},
'compute': {'key': 'compute', 'type': 'Compute'},
'retain_for_lifetime_of_workspace': {'key': 'retainForLifetimeOfWorkspace', 'type': 'bool'},
'queueing_info': {'key': 'queueingInfo', 'type': 'QueueingInfo'},
'inputs': {'key': 'inputs', 'type': '{TypedAssetReference}'},
'outputs': {'key': 'outputs', 'type': '{TypedAssetReference}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword run_number:
:paramtype run_number: int
:keyword root_run_id:
:paramtype root_run_id: str
:keyword created_utc:
:paramtype created_utc: ~datetime.datetime
:keyword created_by:
:paramtype created_by: ~flow.models.User
:keyword user_id:
:paramtype user_id: str
:keyword token:
:paramtype token: str
:keyword token_expiry_time_utc:
:paramtype token_expiry_time_utc: ~datetime.datetime
:keyword error: The error response.
:paramtype error: ~flow.models.ErrorResponse
:keyword warnings:
:paramtype warnings: list[~flow.models.RunDetailsWarningDto]
:keyword revision:
:paramtype revision: long
:keyword status_revision:
:paramtype status_revision: long
:keyword run_uuid:
:paramtype run_uuid: str
:keyword parent_run_uuid:
:paramtype parent_run_uuid: str
:keyword root_run_uuid:
:paramtype root_run_uuid: str
:keyword last_start_time_utc:
:paramtype last_start_time_utc: ~datetime.datetime
:keyword current_compute_time:
:paramtype current_compute_time: str
:keyword compute_duration:
:paramtype compute_duration: str
:keyword effective_start_time_utc:
:paramtype effective_start_time_utc: ~datetime.datetime
:keyword last_modified_by:
:paramtype last_modified_by: ~flow.models.User
:keyword last_modified_utc:
:paramtype last_modified_utc: ~datetime.datetime
:keyword duration:
:paramtype duration: str
:keyword cancelation_reason:
:paramtype cancelation_reason: str
:keyword current_attempt_id:
:paramtype current_attempt_id: int
:keyword run_id:
:paramtype run_id: str
:keyword parent_run_id:
:paramtype parent_run_id: str
:keyword experiment_id:
:paramtype experiment_id: str
:keyword status:
:paramtype status: str
:keyword start_time_utc:
:paramtype start_time_utc: ~datetime.datetime
:keyword end_time_utc:
:paramtype end_time_utc: ~datetime.datetime
:keyword schedule_id:
:paramtype schedule_id: str
:keyword display_name:
:paramtype display_name: str
:keyword name:
:paramtype name: str
:keyword data_container_id:
:paramtype data_container_id: str
:keyword description:
:paramtype description: str
:keyword hidden:
:paramtype hidden: bool
:keyword run_type:
:paramtype run_type: str
:keyword run_type_v2:
:paramtype run_type_v2: ~flow.models.RunTypeV2
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword parameters: Dictionary of :code:`<any>`.
:paramtype parameters: dict[str, any]
:keyword action_uris: Dictionary of :code:`<string>`.
:paramtype action_uris: dict[str, str]
:keyword script_name:
:paramtype script_name: str
:keyword target:
:paramtype target: str
:keyword unique_child_run_compute_targets:
:paramtype unique_child_run_compute_targets: list[str]
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword settings: Dictionary of :code:`<string>`.
:paramtype settings: dict[str, str]
:keyword services: Dictionary of :code:`<EndpointSetting>`.
:paramtype services: dict[str, ~flow.models.EndpointSetting]
:keyword input_datasets:
:paramtype input_datasets: list[~flow.models.DatasetLineage]
:keyword output_datasets:
:paramtype output_datasets: list[~flow.models.OutputDatasetLineage]
:keyword run_definition: Anything.
:paramtype run_definition: any
:keyword job_specification: Anything.
:paramtype job_specification: any
:keyword primary_metric_name:
:paramtype primary_metric_name: str
:keyword created_from:
:paramtype created_from: ~flow.models.CreatedFromDto
:keyword cancel_uri:
:paramtype cancel_uri: str
:keyword complete_uri:
:paramtype complete_uri: str
:keyword diagnostics_uri:
:paramtype diagnostics_uri: str
:keyword compute_request:
:paramtype compute_request: ~flow.models.ComputeRequest
:keyword compute:
:paramtype compute: ~flow.models.Compute
:keyword retain_for_lifetime_of_workspace:
:paramtype retain_for_lifetime_of_workspace: bool
:keyword queueing_info:
:paramtype queueing_info: ~flow.models.QueueingInfo
:keyword inputs: Dictionary of :code:`<TypedAssetReference>`.
:paramtype inputs: dict[str, ~flow.models.TypedAssetReference]
:keyword outputs: Dictionary of :code:`<TypedAssetReference>`.
:paramtype outputs: dict[str, ~flow.models.TypedAssetReference]
"""
super(RunDto, self).__init__(**kwargs)
self.run_number = kwargs.get('run_number', None)
self.root_run_id = kwargs.get('root_run_id', None)
self.created_utc = kwargs.get('created_utc', None)
self.created_by = kwargs.get('created_by', None)
self.user_id = kwargs.get('user_id', None)
self.token = kwargs.get('token', None)
self.token_expiry_time_utc = kwargs.get('token_expiry_time_utc', None)
self.error = kwargs.get('error', None)
self.warnings = kwargs.get('warnings', None)
self.revision = kwargs.get('revision', None)
self.status_revision = kwargs.get('status_revision', None)
self.run_uuid = kwargs.get('run_uuid', None)
self.parent_run_uuid = kwargs.get('parent_run_uuid', None)
self.root_run_uuid = kwargs.get('root_run_uuid', None)
self.last_start_time_utc = kwargs.get('last_start_time_utc', None)
self.current_compute_time = kwargs.get('current_compute_time', None)
self.compute_duration = kwargs.get('compute_duration', None)
self.effective_start_time_utc = kwargs.get('effective_start_time_utc', None)
self.last_modified_by = kwargs.get('last_modified_by', None)
self.last_modified_utc = kwargs.get('last_modified_utc', None)
self.duration = kwargs.get('duration', None)
self.cancelation_reason = kwargs.get('cancelation_reason', None)
self.current_attempt_id = kwargs.get('current_attempt_id', None)
self.run_id = kwargs.get('run_id', None)
self.parent_run_id = kwargs.get('parent_run_id', None)
self.experiment_id = kwargs.get('experiment_id', None)
self.status = kwargs.get('status', None)
self.start_time_utc = kwargs.get('start_time_utc', None)
self.end_time_utc = kwargs.get('end_time_utc', None)
self.schedule_id = kwargs.get('schedule_id', None)
self.display_name = kwargs.get('display_name', None)
self.name = kwargs.get('name', None)
self.data_container_id = kwargs.get('data_container_id', None)
self.description = kwargs.get('description', None)
self.hidden = kwargs.get('hidden', None)
self.run_type = kwargs.get('run_type', None)
self.run_type_v2 = kwargs.get('run_type_v2', None)
self.properties = kwargs.get('properties', None)
self.parameters = kwargs.get('parameters', None)
self.action_uris = kwargs.get('action_uris', None)
self.script_name = kwargs.get('script_name', None)
self.target = kwargs.get('target', None)
self.unique_child_run_compute_targets = kwargs.get('unique_child_run_compute_targets', None)
self.tags = kwargs.get('tags', None)
self.settings = kwargs.get('settings', None)
self.services = kwargs.get('services', None)
self.input_datasets = kwargs.get('input_datasets', None)
self.output_datasets = kwargs.get('output_datasets', None)
self.run_definition = kwargs.get('run_definition', None)
self.job_specification = kwargs.get('job_specification', None)
self.primary_metric_name = kwargs.get('primary_metric_name', None)
self.created_from = kwargs.get('created_from', None)
self.cancel_uri = kwargs.get('cancel_uri', None)
self.complete_uri = kwargs.get('complete_uri', None)
self.diagnostics_uri = kwargs.get('diagnostics_uri', None)
self.compute_request = kwargs.get('compute_request', None)
self.compute = kwargs.get('compute', None)
self.retain_for_lifetime_of_workspace = kwargs.get('retain_for_lifetime_of_workspace', None)
self.queueing_info = kwargs.get('queueing_info', None)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
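
# Example (illustrative only): serialize() applies the wire names declared in
# _attribute_map, so snake_case attributes round-trip to camelCase keys and
# unset (None) attributes are omitted. Values here are hypothetical.
#
#     run = RunDto(run_id='run-001', root_run_id='run-001', hidden=False)
#     body = run.serialize()
#     # body should look like {'runId': 'run-001', 'rootRunId': 'run-001', 'hidden': False}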


class RunIndexEntity(msrest.serialization.Model):
"""RunIndexEntity.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar schema_id:
:vartype schema_id: str
:ivar entity_id:
:vartype entity_id: str
:ivar kind: Possible values include: "Invalid", "LineageRoot", "Versioned", "Unversioned".
:vartype kind: str or ~flow.models.EntityKind
:ivar annotations:
:vartype annotations: ~flow.models.RunAnnotations
:ivar properties:
:vartype properties: ~flow.models.RunProperties
:ivar internal: Any object.
:vartype internal: any
:ivar update_sequence:
:vartype update_sequence: long
:ivar type:
:vartype type: str
:ivar version:
:vartype version: str
:ivar entity_container_id:
:vartype entity_container_id: str
:ivar entity_object_id:
:vartype entity_object_id: str
:ivar resource_type:
:vartype resource_type: str
:ivar relationships:
:vartype relationships: list[~flow.models.Relationship]
:ivar asset_id:
:vartype asset_id: str
"""
_validation = {
'version': {'readonly': True},
'entity_container_id': {'readonly': True},
'entity_object_id': {'readonly': True},
'resource_type': {'readonly': True},
}
_attribute_map = {
'schema_id': {'key': 'schemaId', 'type': 'str'},
'entity_id': {'key': 'entityId', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'annotations': {'key': 'annotations', 'type': 'RunAnnotations'},
'properties': {'key': 'properties', 'type': 'RunProperties'},
'internal': {'key': 'internal', 'type': 'object'},
'update_sequence': {'key': 'updateSequence', 'type': 'long'},
'type': {'key': 'type', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'entity_container_id': {'key': 'entityContainerId', 'type': 'str'},
'entity_object_id': {'key': 'entityObjectId', 'type': 'str'},
'resource_type': {'key': 'resourceType', 'type': 'str'},
'relationships': {'key': 'relationships', 'type': '[Relationship]'},
'asset_id': {'key': 'assetId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword schema_id:
:paramtype schema_id: str
:keyword entity_id:
:paramtype entity_id: str
:keyword kind: Possible values include: "Invalid", "LineageRoot", "Versioned", "Unversioned".
:paramtype kind: str or ~flow.models.EntityKind
:keyword annotations:
:paramtype annotations: ~flow.models.RunAnnotations
:keyword properties:
:paramtype properties: ~flow.models.RunProperties
:keyword internal: Any object.
:paramtype internal: any
:keyword update_sequence:
:paramtype update_sequence: long
:keyword type:
:paramtype type: str
:keyword relationships:
:paramtype relationships: list[~flow.models.Relationship]
:keyword asset_id:
:paramtype asset_id: str
"""
super(RunIndexEntity, self).__init__(**kwargs)
self.schema_id = kwargs.get('schema_id', None)
self.entity_id = kwargs.get('entity_id', None)
self.kind = kwargs.get('kind', None)
self.annotations = kwargs.get('annotations', None)
self.properties = kwargs.get('properties', None)
self.internal = kwargs.get('internal', None)
self.update_sequence = kwargs.get('update_sequence', None)
self.type = kwargs.get('type', None)
self.version = None
self.entity_container_id = None
self.entity_object_id = None
self.resource_type = None
self.relationships = kwargs.get('relationships', None)
self.asset_id = kwargs.get('asset_id', None)
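
# Example (illustrative only): 'version', 'entity_container_id',
# 'entity_object_id' and 'resource_type' are marked readonly in _validation,
# so the constructor ignores them; they are populated by the server.
# 'Versioned' is one of the documented kind values.
#
#     entity = RunIndexEntity(entity_id='abc', kind='Versioned')
#     assert entity.version is None   # server-populated, not client-settable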


class RunIndexMetricSummary(msrest.serialization.Model):
"""RunIndexMetricSummary.
:ivar count:
:vartype count: long
:ivar last_value: Anything.
:vartype last_value: any
:ivar minimum_value: Anything.
:vartype minimum_value: any
:ivar maximum_value: Anything.
:vartype maximum_value: any
:ivar metric_type:
:vartype metric_type: str
"""
_attribute_map = {
'count': {'key': 'count', 'type': 'long'},
'last_value': {'key': 'lastValue', 'type': 'object'},
'minimum_value': {'key': 'minimumValue', 'type': 'object'},
'maximum_value': {'key': 'maximumValue', 'type': 'object'},
'metric_type': {'key': 'metricType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword count:
:paramtype count: long
:keyword last_value: Anything.
:paramtype last_value: any
:keyword minimum_value: Anything.
:paramtype minimum_value: any
:keyword maximum_value: Anything.
:paramtype maximum_value: any
:keyword metric_type:
:paramtype metric_type: str
"""
super(RunIndexMetricSummary, self).__init__(**kwargs)
self.count = kwargs.get('count', None)
self.last_value = kwargs.get('last_value', None)
self.minimum_value = kwargs.get('minimum_value', None)
self.maximum_value = kwargs.get('maximum_value', None)
self.metric_type = kwargs.get('metric_type', None)
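
# Example (illustrative only): a scalar metric summary. The last/min/max
# values are untyped ('object' on the wire), so any JSON-serializable value
# fits; the metric_type string below is hypothetical.
#
#     summary = RunIndexMetricSummary(
#         count=10,
#         last_value=0.93,
#         minimum_value=0.78,
#         maximum_value=0.95,
#         metric_type='scalar',
#     )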


class RunIndexMetricSummarySystemObject(msrest.serialization.Model):
"""RunIndexMetricSummarySystemObject.
:ivar count:
:vartype count: long
:ivar last_value: Anything.
:vartype last_value: any
:ivar minimum_value: Anything.
:vartype minimum_value: any
:ivar maximum_value: Anything.
:vartype maximum_value: any
:ivar metric_type:
:vartype metric_type: str
"""
_attribute_map = {
'count': {'key': 'count', 'type': 'long'},
'last_value': {'key': 'lastValue', 'type': 'object'},
'minimum_value': {'key': 'minimumValue', 'type': 'object'},
'maximum_value': {'key': 'maximumValue', 'type': 'object'},
'metric_type': {'key': 'metricType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword count:
:paramtype count: long
:keyword last_value: Anything.
:paramtype last_value: any
:keyword minimum_value: Anything.
:paramtype minimum_value: any
:keyword maximum_value: Anything.
:paramtype maximum_value: any
:keyword metric_type:
:paramtype metric_type: str
"""
super(RunIndexMetricSummarySystemObject, self).__init__(**kwargs)
self.count = kwargs.get('count', None)
self.last_value = kwargs.get('last_value', None)
self.minimum_value = kwargs.get('minimum_value', None)
self.maximum_value = kwargs.get('maximum_value', None)
self.metric_type = kwargs.get('metric_type', None)


class RunIndexResourceMetricSummary(msrest.serialization.Model):
"""RunIndexResourceMetricSummary.
:ivar gpu_utilization_percent_last_hour:
:vartype gpu_utilization_percent_last_hour: float
:ivar gpu_memory_utilization_percent_last_hour:
:vartype gpu_memory_utilization_percent_last_hour: float
:ivar gpu_energy_joules:
:vartype gpu_energy_joules: float
:ivar resource_metric_names:
:vartype resource_metric_names: list[str]
"""
_attribute_map = {
'gpu_utilization_percent_last_hour': {'key': 'gpuUtilizationPercentLastHour', 'type': 'float'},
'gpu_memory_utilization_percent_last_hour': {'key': 'gpuMemoryUtilizationPercentLastHour', 'type': 'float'},
'gpu_energy_joules': {'key': 'gpuEnergyJoules', 'type': 'float'},
'resource_metric_names': {'key': 'resourceMetricNames', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword gpu_utilization_percent_last_hour:
:paramtype gpu_utilization_percent_last_hour: float
:keyword gpu_memory_utilization_percent_last_hour:
:paramtype gpu_memory_utilization_percent_last_hour: float
:keyword gpu_energy_joules:
:paramtype gpu_energy_joules: float
:keyword resource_metric_names:
:paramtype resource_metric_names: list[str]
"""
super(RunIndexResourceMetricSummary, self).__init__(**kwargs)
self.gpu_utilization_percent_last_hour = kwargs.get('gpu_utilization_percent_last_hour', None)
self.gpu_memory_utilization_percent_last_hour = kwargs.get('gpu_memory_utilization_percent_last_hour', None)
self.gpu_energy_joules = kwargs.get('gpu_energy_joules', None)
self.resource_metric_names = kwargs.get('resource_metric_names', None)


class RunMetricDto(msrest.serialization.Model):
"""RunMetricDto.
:ivar run_id:
:vartype run_id: str
:ivar metric_id:
:vartype metric_id: str
:ivar data_container_id:
:vartype data_container_id: str
:ivar metric_type:
:vartype metric_type: str
:ivar created_utc:
:vartype created_utc: ~datetime.datetime
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar label:
:vartype label: str
:ivar num_cells:
:vartype num_cells: int
:ivar data_location:
:vartype data_location: str
:ivar cells:
:vartype cells: list[dict[str, any]]
:ivar schema:
:vartype schema: ~flow.models.MetricSchemaDto
"""
_attribute_map = {
'run_id': {'key': 'runId', 'type': 'str'},
'metric_id': {'key': 'metricId', 'type': 'str'},
'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
'metric_type': {'key': 'metricType', 'type': 'str'},
'created_utc': {'key': 'createdUtc', 'type': 'iso-8601'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'num_cells': {'key': 'numCells', 'type': 'int'},
'data_location': {'key': 'dataLocation', 'type': 'str'},
'cells': {'key': 'cells', 'type': '[{object}]'},
'schema': {'key': 'schema', 'type': 'MetricSchemaDto'},
}
def __init__(
self,
**kwargs
):
"""
:keyword run_id:
:paramtype run_id: str
:keyword metric_id:
:paramtype metric_id: str
:keyword data_container_id:
:paramtype data_container_id: str
:keyword metric_type:
:paramtype metric_type: str
:keyword created_utc:
:paramtype created_utc: ~datetime.datetime
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword label:
:paramtype label: str
:keyword num_cells:
:paramtype num_cells: int
:keyword data_location:
:paramtype data_location: str
:keyword cells:
:paramtype cells: list[dict[str, any]]
:keyword schema:
:paramtype schema: ~flow.models.MetricSchemaDto
"""
super(RunMetricDto, self).__init__(**kwargs)
self.run_id = kwargs.get('run_id', None)
self.metric_id = kwargs.get('metric_id', None)
self.data_container_id = kwargs.get('data_container_id', None)
self.metric_type = kwargs.get('metric_type', None)
self.created_utc = kwargs.get('created_utc', None)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.label = kwargs.get('label', None)
self.num_cells = kwargs.get('num_cells', None)
self.data_location = kwargs.get('data_location', None)
self.cells = kwargs.get('cells', None)
self.schema = kwargs.get('schema', None)
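
# Example (illustrative only): 'cells' is a list of dictionaries
# ('[{object}]' on the wire), one entry per logged value; the metric name and
# values below are hypothetical.
#
#     metric = RunMetricDto(
#         run_id='run-001',
#         name='accuracy',
#         num_cells=2,
#         cells=[{'accuracy': 0.91}, {'accuracy': 0.93}],
#     )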


class RunMetricsTypesDto(msrest.serialization.Model):
"""RunMetricsTypesDto.
:ivar name:
:vartype name: str
:ivar type:
:vartype type: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type:
:paramtype type: str
"""
super(RunMetricsTypesDto, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = kwargs.get('type', None)


class RunProperties(msrest.serialization.Model):
"""RunProperties.
:ivar data_container_id:
:vartype data_container_id: str
:ivar target_name:
:vartype target_name: str
:ivar run_name:
:vartype run_name: str
:ivar experiment_name:
:vartype experiment_name: str
:ivar run_id:
:vartype run_id: str
:ivar parent_run_id:
:vartype parent_run_id: str
:ivar root_run_id:
:vartype root_run_id: str
:ivar run_type:
:vartype run_type: str
:ivar run_type_v2:
:vartype run_type_v2: ~flow.models.RunTypeV2Index
:ivar script_name:
:vartype script_name: str
:ivar experiment_id:
:vartype experiment_id: str
:ivar run_uuid:
:vartype run_uuid: str
:ivar parent_run_uuid:
:vartype parent_run_uuid: str
:ivar run_number:
:vartype run_number: int
:ivar start_time:
:vartype start_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
:ivar compute_request:
:vartype compute_request: ~flow.models.ComputeRequest
:ivar compute:
:vartype compute: ~flow.models.Compute
:ivar user_properties: This is a dictionary.
:vartype user_properties: dict[str, str]
:ivar action_uris: This is a dictionary.
:vartype action_uris: dict[str, str]
:ivar duration:
:vartype duration: str
:ivar duration_milliseconds:
:vartype duration_milliseconds: float
:ivar creation_context:
:vartype creation_context: ~flow.models.CreationContext
"""
_attribute_map = {
'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
'target_name': {'key': 'targetName', 'type': 'str'},
'run_name': {'key': 'runName', 'type': 'str'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'run_id': {'key': 'runId', 'type': 'str'},
'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
'root_run_id': {'key': 'rootRunId', 'type': 'str'},
'run_type': {'key': 'runType', 'type': 'str'},
'run_type_v2': {'key': 'runTypeV2', 'type': 'RunTypeV2Index'},
'script_name': {'key': 'scriptName', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'run_uuid': {'key': 'runUuid', 'type': 'str'},
'parent_run_uuid': {'key': 'parentRunUuid', 'type': 'str'},
'run_number': {'key': 'runNumber', 'type': 'int'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'compute_request': {'key': 'computeRequest', 'type': 'ComputeRequest'},
'compute': {'key': 'compute', 'type': 'Compute'},
'user_properties': {'key': 'userProperties', 'type': '{str}'},
'action_uris': {'key': 'actionUris', 'type': '{str}'},
'duration': {'key': 'duration', 'type': 'str'},
'duration_milliseconds': {'key': 'durationMilliseconds', 'type': 'float'},
'creation_context': {'key': 'creationContext', 'type': 'CreationContext'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_container_id:
:paramtype data_container_id: str
:keyword target_name:
:paramtype target_name: str
:keyword run_name:
:paramtype run_name: str
:keyword experiment_name:
:paramtype experiment_name: str
:keyword run_id:
:paramtype run_id: str
:keyword parent_run_id:
:paramtype parent_run_id: str
:keyword root_run_id:
:paramtype root_run_id: str
:keyword run_type:
:paramtype run_type: str
:keyword run_type_v2:
:paramtype run_type_v2: ~flow.models.RunTypeV2Index
:keyword script_name:
:paramtype script_name: str
:keyword experiment_id:
:paramtype experiment_id: str
:keyword run_uuid:
:paramtype run_uuid: str
:keyword parent_run_uuid:
:paramtype parent_run_uuid: str
:keyword run_number:
:paramtype run_number: int
:keyword start_time:
:paramtype start_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
:keyword compute_request:
:paramtype compute_request: ~flow.models.ComputeRequest
:keyword compute:
:paramtype compute: ~flow.models.Compute
:keyword user_properties: This is a dictionary.
:paramtype user_properties: dict[str, str]
:keyword action_uris: This is a dictionary.
:paramtype action_uris: dict[str, str]
:keyword duration:
:paramtype duration: str
:keyword duration_milliseconds:
:paramtype duration_milliseconds: float
:keyword creation_context:
:paramtype creation_context: ~flow.models.CreationContext
"""
super(RunProperties, self).__init__(**kwargs)
self.data_container_id = kwargs.get('data_container_id', None)
self.target_name = kwargs.get('target_name', None)
self.run_name = kwargs.get('run_name', None)
self.experiment_name = kwargs.get('experiment_name', None)
self.run_id = kwargs.get('run_id', None)
self.parent_run_id = kwargs.get('parent_run_id', None)
self.root_run_id = kwargs.get('root_run_id', None)
self.run_type = kwargs.get('run_type', None)
self.run_type_v2 = kwargs.get('run_type_v2', None)
self.script_name = kwargs.get('script_name', None)
self.experiment_id = kwargs.get('experiment_id', None)
self.run_uuid = kwargs.get('run_uuid', None)
self.parent_run_uuid = kwargs.get('parent_run_uuid', None)
self.run_number = kwargs.get('run_number', None)
self.start_time = kwargs.get('start_time', None)
self.end_time = kwargs.get('end_time', None)
self.compute_request = kwargs.get('compute_request', None)
self.compute = kwargs.get('compute', None)
self.user_properties = kwargs.get('user_properties', None)
self.action_uris = kwargs.get('action_uris', None)
self.duration = kwargs.get('duration', None)
self.duration_milliseconds = kwargs.get('duration_milliseconds', None)
self.creation_context = kwargs.get('creation_context', None)
class RunSettingParameter(msrest.serialization.Model):
"""RunSettingParameter.
:ivar name:
:vartype name: str
:ivar label:
:vartype label: str
:ivar parameter_type: Possible values include: "Undefined", "Int", "Double", "Bool", "String",
"JsonString", "YamlString", "StringList".
:vartype parameter_type: str or ~flow.models.RunSettingParameterType
:ivar is_optional:
:vartype is_optional: bool
:ivar default_value:
:vartype default_value: str
:ivar lower_bound:
:vartype lower_bound: str
:ivar upper_bound:
:vartype upper_bound: str
:ivar description:
:vartype description: str
:ivar run_setting_ui_hint:
:vartype run_setting_ui_hint: ~flow.models.RunSettingUIParameterHint
:ivar argument_name:
:vartype argument_name: str
:ivar section_name:
:vartype section_name: str
:ivar section_description:
:vartype section_description: str
:ivar section_argument_name:
:vartype section_argument_name: str
:ivar examples:
:vartype examples: list[str]
:ivar enum_values:
:vartype enum_values: list[str]
:ivar enum_values_to_argument_strings: This is a dictionary.
:vartype enum_values_to_argument_strings: dict[str, str]
:ivar enabled_by_parameter_name:
:vartype enabled_by_parameter_name: str
:ivar enabled_by_parameter_values:
:vartype enabled_by_parameter_values: list[str]
:ivar disabled_by_parameters:
:vartype disabled_by_parameters: list[str]
:ivar module_run_setting_type: Possible values include: "Released", "Testing", "Legacy",
"Preview", "Integration", "All", "Default", "Full", "UxIntegration", "UxFull".
:vartype module_run_setting_type: str or ~flow.models.ModuleRunSettingTypes
:ivar linked_parameter_default_value_mapping: Dictionary of :code:`<string>`.
:vartype linked_parameter_default_value_mapping: dict[str, str]
:ivar linked_parameter_key_name:
:vartype linked_parameter_key_name: str
:ivar support_link_setting:
:vartype support_link_setting: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'parameter_type': {'key': 'parameterType', 'type': 'str'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
'default_value': {'key': 'defaultValue', 'type': 'str'},
'lower_bound': {'key': 'lowerBound', 'type': 'str'},
'upper_bound': {'key': 'upperBound', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'run_setting_ui_hint': {'key': 'runSettingUIHint', 'type': 'RunSettingUIParameterHint'},
'argument_name': {'key': 'argumentName', 'type': 'str'},
'section_name': {'key': 'sectionName', 'type': 'str'},
'section_description': {'key': 'sectionDescription', 'type': 'str'},
'section_argument_name': {'key': 'sectionArgumentName', 'type': 'str'},
'examples': {'key': 'examples', 'type': '[str]'},
'enum_values': {'key': 'enumValues', 'type': '[str]'},
'enum_values_to_argument_strings': {'key': 'enumValuesToArgumentStrings', 'type': '{str}'},
'enabled_by_parameter_name': {'key': 'enabledByParameterName', 'type': 'str'},
'enabled_by_parameter_values': {'key': 'enabledByParameterValues', 'type': '[str]'},
'disabled_by_parameters': {'key': 'disabledByParameters', 'type': '[str]'},
'module_run_setting_type': {'key': 'moduleRunSettingType', 'type': 'str'},
'linked_parameter_default_value_mapping': {'key': 'linkedParameterDefaultValueMapping', 'type': '{str}'},
'linked_parameter_key_name': {'key': 'linkedParameterKeyName', 'type': 'str'},
'support_link_setting': {'key': 'supportLinkSetting', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword label:
:paramtype label: str
:keyword parameter_type: Possible values include: "Undefined", "Int", "Double", "Bool",
"String", "JsonString", "YamlString", "StringList".
:paramtype parameter_type: str or ~flow.models.RunSettingParameterType
:keyword is_optional:
:paramtype is_optional: bool
:keyword default_value:
:paramtype default_value: str
:keyword lower_bound:
:paramtype lower_bound: str
:keyword upper_bound:
:paramtype upper_bound: str
:keyword description:
:paramtype description: str
:keyword run_setting_ui_hint:
:paramtype run_setting_ui_hint: ~flow.models.RunSettingUIParameterHint
:keyword argument_name:
:paramtype argument_name: str
:keyword section_name:
:paramtype section_name: str
:keyword section_description:
:paramtype section_description: str
:keyword section_argument_name:
:paramtype section_argument_name: str
:keyword examples:
:paramtype examples: list[str]
:keyword enum_values:
:paramtype enum_values: list[str]
:keyword enum_values_to_argument_strings: This is a dictionary.
:paramtype enum_values_to_argument_strings: dict[str, str]
:keyword enabled_by_parameter_name:
:paramtype enabled_by_parameter_name: str
:keyword enabled_by_parameter_values:
:paramtype enabled_by_parameter_values: list[str]
:keyword disabled_by_parameters:
:paramtype disabled_by_parameters: list[str]
:keyword module_run_setting_type: Possible values include: "Released", "Testing", "Legacy",
"Preview", "Integration", "All", "Default", "Full", "UxIntegration", "UxFull".
:paramtype module_run_setting_type: str or ~flow.models.ModuleRunSettingTypes
:keyword linked_parameter_default_value_mapping: Dictionary of :code:`<string>`.
:paramtype linked_parameter_default_value_mapping: dict[str, str]
:keyword linked_parameter_key_name:
:paramtype linked_parameter_key_name: str
:keyword support_link_setting:
:paramtype support_link_setting: bool
"""
super(RunSettingParameter, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.label = kwargs.get('label', None)
self.parameter_type = kwargs.get('parameter_type', None)
self.is_optional = kwargs.get('is_optional', None)
self.default_value = kwargs.get('default_value', None)
self.lower_bound = kwargs.get('lower_bound', None)
self.upper_bound = kwargs.get('upper_bound', None)
self.description = kwargs.get('description', None)
self.run_setting_ui_hint = kwargs.get('run_setting_ui_hint', None)
self.argument_name = kwargs.get('argument_name', None)
self.section_name = kwargs.get('section_name', None)
self.section_description = kwargs.get('section_description', None)
self.section_argument_name = kwargs.get('section_argument_name', None)
self.examples = kwargs.get('examples', None)
self.enum_values = kwargs.get('enum_values', None)
self.enum_values_to_argument_strings = kwargs.get('enum_values_to_argument_strings', None)
self.enabled_by_parameter_name = kwargs.get('enabled_by_parameter_name', None)
self.enabled_by_parameter_values = kwargs.get('enabled_by_parameter_values', None)
self.disabled_by_parameters = kwargs.get('disabled_by_parameters', None)
self.module_run_setting_type = kwargs.get('module_run_setting_type', None)
self.linked_parameter_default_value_mapping = kwargs.get('linked_parameter_default_value_mapping', None)
self.linked_parameter_key_name = kwargs.get('linked_parameter_key_name', None)
self.support_link_setting = kwargs.get('support_link_setting', None)
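
# Illustrative sketch (not part of the generated surface): how an enum-style
# run setting parameter might be populated. Note that defaults and bounds are
# carried as strings per the attribute map above. All literal values below
# are hypothetical.
def _example_run_setting_parameter():
    return RunSettingParameter(
        name="priority",                      # hypothetical setting name
        label="Job priority",
        parameter_type="Int",
        is_optional=True,
        default_value="3",
        lower_bound="1",
        upper_bound="5",
        description="Scheduling priority of the submitted job.",
    )
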
class RunSettingParameterAssignment(msrest.serialization.Model):
"""RunSettingParameterAssignment.
:ivar use_graph_default_compute:
:vartype use_graph_default_compute: bool
:ivar mlc_compute_type:
:vartype mlc_compute_type: str
:ivar compute_run_settings:
:vartype compute_run_settings: list[~flow.models.RunSettingParameterAssignment]
:ivar linked_parameter_name:
:vartype linked_parameter_name: str
:ivar value_type: Possible values include: "Literal", "GraphParameterName", "Concatenate",
"Input", "DataPath", "DataSetDefinition".
:vartype value_type: str or ~flow.models.ParameterValueType
:ivar assignments_to_concatenate:
:vartype assignments_to_concatenate: list[~flow.models.ParameterAssignment]
:ivar data_path_assignment:
:vartype data_path_assignment: ~flow.models.LegacyDataPath
:ivar data_set_definition_value_assignment:
:vartype data_set_definition_value_assignment: ~flow.models.DataSetDefinitionValue
:ivar name:
:vartype name: str
:ivar value:
:vartype value: str
"""
_attribute_map = {
'use_graph_default_compute': {'key': 'useGraphDefaultCompute', 'type': 'bool'},
'mlc_compute_type': {'key': 'mlcComputeType', 'type': 'str'},
'compute_run_settings': {'key': 'computeRunSettings', 'type': '[RunSettingParameterAssignment]'},
'linked_parameter_name': {'key': 'linkedParameterName', 'type': 'str'},
'value_type': {'key': 'valueType', 'type': 'str'},
'assignments_to_concatenate': {'key': 'assignmentsToConcatenate', 'type': '[ParameterAssignment]'},
'data_path_assignment': {'key': 'dataPathAssignment', 'type': 'LegacyDataPath'},
'data_set_definition_value_assignment': {'key': 'dataSetDefinitionValueAssignment', 'type': 'DataSetDefinitionValue'},
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword use_graph_default_compute:
:paramtype use_graph_default_compute: bool
:keyword mlc_compute_type:
:paramtype mlc_compute_type: str
:keyword compute_run_settings:
:paramtype compute_run_settings: list[~flow.models.RunSettingParameterAssignment]
:keyword linked_parameter_name:
:paramtype linked_parameter_name: str
:keyword value_type: Possible values include: "Literal", "GraphParameterName", "Concatenate",
"Input", "DataPath", "DataSetDefinition".
:paramtype value_type: str or ~flow.models.ParameterValueType
:keyword assignments_to_concatenate:
:paramtype assignments_to_concatenate: list[~flow.models.ParameterAssignment]
:keyword data_path_assignment:
:paramtype data_path_assignment: ~flow.models.LegacyDataPath
:keyword data_set_definition_value_assignment:
:paramtype data_set_definition_value_assignment: ~flow.models.DataSetDefinitionValue
:keyword name:
:paramtype name: str
:keyword value:
:paramtype value: str
"""
super(RunSettingParameterAssignment, self).__init__(**kwargs)
self.use_graph_default_compute = kwargs.get('use_graph_default_compute', None)
self.mlc_compute_type = kwargs.get('mlc_compute_type', None)
self.compute_run_settings = kwargs.get('compute_run_settings', None)
self.linked_parameter_name = kwargs.get('linked_parameter_name', None)
self.value_type = kwargs.get('value_type', None)
self.assignments_to_concatenate = kwargs.get('assignments_to_concatenate', None)
self.data_path_assignment = kwargs.get('data_path_assignment', None)
self.data_set_definition_value_assignment = kwargs.get('data_set_definition_value_assignment', None)
self.name = kwargs.get('name', None)
self.value = kwargs.get('value', None)
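
# Illustrative sketch: assigning a literal value to a run setting. Note that
# compute_run_settings nests further RunSettingParameterAssignment objects,
# so compute-specific settings can be grouped under a single assignment.
# Names and values below are hypothetical.
def _example_run_setting_assignment():
    return RunSettingParameterAssignment(
        name="Target",
        value="my-cluster",                 # hypothetical compute name
        value_type="Literal",
        use_graph_default_compute=False,
        compute_run_settings=[
            RunSettingParameterAssignment(name="NodeCount", value="2", value_type="Literal"),
        ],
    )
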
class RunSettingUIParameterHint(msrest.serialization.Model):
"""RunSettingUIParameterHint.
:ivar ui_widget_type: Possible values include: "Default", "ComputeSelection", "JsonEditor",
"Mode", "SearchSpaceParameter", "SectionToggle", "YamlEditor", "EnableRuntimeSweep",
"DataStoreSelection", "Checkbox", "MultipleSelection", "HyperparameterConfiguration",
"JsonTextBox", "Connection", "Static".
:vartype ui_widget_type: str or ~flow.models.RunSettingUIWidgetTypeEnum
:ivar json_editor:
:vartype json_editor: ~flow.models.UIJsonEditor
:ivar yaml_editor:
:vartype yaml_editor: ~flow.models.UIYamlEditor
:ivar compute_selection:
:vartype compute_selection: ~flow.models.UIComputeSelection
:ivar hyperparameter_configuration:
:vartype hyperparameter_configuration: ~flow.models.UIHyperparameterConfiguration
:ivar ux_ignore:
:vartype ux_ignore: bool
:ivar anonymous:
:vartype anonymous: bool
:ivar support_reset:
:vartype support_reset: bool
"""
_attribute_map = {
'ui_widget_type': {'key': 'uiWidgetType', 'type': 'str'},
'json_editor': {'key': 'jsonEditor', 'type': 'UIJsonEditor'},
'yaml_editor': {'key': 'yamlEditor', 'type': 'UIYamlEditor'},
'compute_selection': {'key': 'computeSelection', 'type': 'UIComputeSelection'},
'hyperparameter_configuration': {'key': 'hyperparameterConfiguration', 'type': 'UIHyperparameterConfiguration'},
'ux_ignore': {'key': 'uxIgnore', 'type': 'bool'},
'anonymous': {'key': 'anonymous', 'type': 'bool'},
'support_reset': {'key': 'supportReset', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword ui_widget_type: Possible values include: "Default", "ComputeSelection", "JsonEditor",
"Mode", "SearchSpaceParameter", "SectionToggle", "YamlEditor", "EnableRuntimeSweep",
"DataStoreSelection", "Checkbox", "MultipleSelection", "HyperparameterConfiguration",
"JsonTextBox", "Connection", "Static".
:paramtype ui_widget_type: str or ~flow.models.RunSettingUIWidgetTypeEnum
:keyword json_editor:
:paramtype json_editor: ~flow.models.UIJsonEditor
:keyword yaml_editor:
:paramtype yaml_editor: ~flow.models.UIYamlEditor
:keyword compute_selection:
:paramtype compute_selection: ~flow.models.UIComputeSelection
:keyword hyperparameter_configuration:
:paramtype hyperparameter_configuration: ~flow.models.UIHyperparameterConfiguration
:keyword ux_ignore:
:paramtype ux_ignore: bool
:keyword anonymous:
:paramtype anonymous: bool
:keyword support_reset:
:paramtype support_reset: bool
"""
super(RunSettingUIParameterHint, self).__init__(**kwargs)
self.ui_widget_type = kwargs.get('ui_widget_type', None)
self.json_editor = kwargs.get('json_editor', None)
self.yaml_editor = kwargs.get('yaml_editor', None)
self.compute_selection = kwargs.get('compute_selection', None)
self.hyperparameter_configuration = kwargs.get('hyperparameter_configuration', None)
self.ux_ignore = kwargs.get('ux_ignore', None)
self.anonymous = kwargs.get('anonymous', None)
self.support_reset = kwargs.get('support_reset', None)
class RunStatusPeriod(msrest.serialization.Model):
"""RunStatusPeriod.
:ivar status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:vartype status: str or ~flow.models.RunStatus
:ivar sub_periods:
:vartype sub_periods: list[~flow.models.SubStatusPeriod]
:ivar start:
:vartype start: long
:ivar end:
:vartype end: long
"""
_attribute_map = {
'status': {'key': 'status', 'type': 'str'},
'sub_periods': {'key': 'subPeriods', 'type': '[SubStatusPeriod]'},
'start': {'key': 'start', 'type': 'long'},
'end': {'key': 'end', 'type': 'long'},
}
def __init__(
self,
**kwargs
):
"""
:keyword status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:paramtype status: str or ~flow.models.RunStatus
:keyword sub_periods:
:paramtype sub_periods: list[~flow.models.SubStatusPeriod]
:keyword start:
:paramtype start: long
:keyword end:
:paramtype end: long
"""
super(RunStatusPeriod, self).__init__(**kwargs)
self.status = kwargs.get('status', None)
self.sub_periods = kwargs.get('sub_periods', None)
self.start = kwargs.get('start', None)
self.end = kwargs.get('end', None)
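
# Illustrative sketch: a single status period. The start/end fields are
# int64 ("long") values; the generated docs do not state the unit, so the
# epoch-millisecond-style numbers below are hypothetical.
def _example_run_status_period():
    return RunStatusPeriod(
        status="Running",
        start=1700000000000,
        end=1700000600000,
    )
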
class RuntimeConfiguration(msrest.serialization.Model):
"""RuntimeConfiguration.
:ivar base_image:
:vartype base_image: str
:ivar version:
:vartype version: str
"""
_attribute_map = {
'base_image': {'key': 'baseImage', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword base_image:
:paramtype base_image: str
:keyword version:
:paramtype version: str
"""
super(RuntimeConfiguration, self).__init__(**kwargs)
self.base_image = kwargs.get('base_image', None)
self.version = kwargs.get('version', None)
class RunTypeV2(msrest.serialization.Model):
"""RunTypeV2.
:ivar orchestrator:
:vartype orchestrator: str
:ivar traits:
:vartype traits: list[str]
:ivar attribution:
:vartype attribution: str
:ivar compute_type:
:vartype compute_type: str
"""
_validation = {
'traits': {'unique': True},
}
_attribute_map = {
'orchestrator': {'key': 'orchestrator', 'type': 'str'},
'traits': {'key': 'traits', 'type': '[str]'},
'attribution': {'key': 'attribution', 'type': 'str'},
'compute_type': {'key': 'computeType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword orchestrator:
:paramtype orchestrator: str
:keyword traits:
:paramtype traits: list[str]
:keyword attribution:
:paramtype attribution: str
:keyword compute_type:
:paramtype compute_type: str
"""
super(RunTypeV2, self).__init__(**kwargs)
self.orchestrator = kwargs.get('orchestrator', None)
self.traits = kwargs.get('traits', None)
self.attribution = kwargs.get('attribution', None)
self.compute_type = kwargs.get('compute_type', None)
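
# Illustrative sketch: RunTypeV2 declares a 'unique' constraint on traits in
# _validation, so msrest validation should reject duplicate entries when the
# model is serialized. serialize() is the standard msrest.serialization.Model
# method; the trait values are hypothetical.
def _example_run_type_v2():
    run_type = RunTypeV2(
        orchestrator="Command",
        traits=["fast-rerun", "cached"],   # duplicates here would fail validation
        compute_type="AmlCompute",
    )
    return run_type.serialize()            # camelCase wire-format dict
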
class RunTypeV2Index(msrest.serialization.Model):
"""RunTypeV2Index.
:ivar orchestrator:
:vartype orchestrator: str
:ivar traits: Dictionary of :code:`<string>`.
:vartype traits: dict[str, str]
:ivar attribution:
:vartype attribution: str
:ivar compute_type:
:vartype compute_type: str
"""
_attribute_map = {
'orchestrator': {'key': 'orchestrator', 'type': 'str'},
'traits': {'key': 'traits', 'type': '{str}'},
'attribution': {'key': 'attribution', 'type': 'str'},
'compute_type': {'key': 'computeType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword orchestrator:
:paramtype orchestrator: str
:keyword traits: Dictionary of :code:`<string>`.
:paramtype traits: dict[str, str]
:keyword attribution:
:paramtype attribution: str
:keyword compute_type:
:paramtype compute_type: str
"""
super(RunTypeV2Index, self).__init__(**kwargs)
self.orchestrator = kwargs.get('orchestrator', None)
self.traits = kwargs.get('traits', None)
self.attribution = kwargs.get('attribution', None)
self.compute_type = kwargs.get('compute_type', None)
class SampleMeta(msrest.serialization.Model):
"""SampleMeta.
:ivar image:
:vartype image: str
:ivar id:
:vartype id: str
:ivar display_name:
:vartype display_name: str
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar doc_link:
:vartype doc_link: str
:ivar tags: A set of tags.
:vartype tags: list[str]
:ivar created_at:
:vartype created_at: ~datetime.datetime
:ivar updated_at:
:vartype updated_at: ~datetime.datetime
:ivar feed_name:
:vartype feed_name: str
:ivar version:
:vartype version: str
"""
_attribute_map = {
'image': {'key': 'image', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'doc_link': {'key': 'docLink', 'type': 'str'},
'tags': {'key': 'tags', 'type': '[str]'},
'created_at': {'key': 'createdAt', 'type': 'iso-8601'},
'updated_at': {'key': 'updatedAt', 'type': 'iso-8601'},
'feed_name': {'key': 'feedName', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword image:
:paramtype image: str
:keyword id:
:paramtype id: str
:keyword display_name:
:paramtype display_name: str
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword doc_link:
:paramtype doc_link: str
:keyword tags: A set of tags.
:paramtype tags: list[str]
:keyword created_at:
:paramtype created_at: ~datetime.datetime
:keyword updated_at:
:paramtype updated_at: ~datetime.datetime
:keyword feed_name:
:paramtype feed_name: str
:keyword version:
:paramtype version: str
"""
super(SampleMeta, self).__init__(**kwargs)
self.image = kwargs.get('image', None)
self.id = kwargs.get('id', None)
self.display_name = kwargs.get('display_name', None)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.doc_link = kwargs.get('doc_link', None)
self.tags = kwargs.get('tags', None)
self.created_at = kwargs.get('created_at', None)
self.updated_at = kwargs.get('updated_at', None)
self.feed_name = kwargs.get('feed_name', None)
self.version = kwargs.get('version', None)
class SavedDataSetReference(msrest.serialization.Model):
"""SavedDataSetReference.
:ivar id:
:vartype id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
"""
super(SavedDataSetReference, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
class SavePipelineDraftRequest(msrest.serialization.Model):
"""SavePipelineDraftRequest.
:ivar ui_widget_meta_infos:
:vartype ui_widget_meta_infos: list[~flow.models.UIWidgetMetaInfo]
:ivar web_service_inputs:
:vartype web_service_inputs: list[~flow.models.WebServicePort]
:ivar web_service_outputs:
:vartype web_service_outputs: list[~flow.models.WebServicePort]
:ivar nodes_in_draft:
:vartype nodes_in_draft: list[str]
:ivar name:
:vartype name: str
:ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline",
"BatchInferencePipeline", "Unknown".
:vartype pipeline_type: str or ~flow.models.PipelineType
:ivar pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:vartype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:ivar graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
"ContainsDesignerBuildin".
:vartype graph_components_mode: str or ~flow.models.GraphComponentsMode
:ivar sub_pipelines_info:
:vartype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:ivar flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
:vartype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
:ivar pipeline_parameters: This is a dictionary.
:vartype pipeline_parameters: dict[str, str]
:ivar data_path_assignments: This is a dictionary.
:vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:ivar data_set_definition_value_assignments: This is a dictionary.
:vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue]
:ivar asset_output_settings_assignments: This is a dictionary.
:vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:ivar graph:
:vartype graph: ~flow.models.GraphDraftEntity
:ivar pipeline_run_settings:
:vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:ivar module_node_run_settings:
:vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:ivar module_node_ui_input_settings:
:vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar continue_run_on_step_failure:
:vartype continue_run_on_step_failure: bool
:ivar description:
:vartype description: str
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar enforce_rerun:
:vartype enforce_rerun: bool
:ivar dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2",
"DatasetInDesignerUI", "AssetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI",
"AssetInDpv2WithDatasetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI",
"DatasetAndAssetInDpv2WithDatasetInDesignerUI", "Dataset", "Asset".
:vartype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
_attribute_map = {
'ui_widget_meta_infos': {'key': 'uiWidgetMetaInfos', 'type': '[UIWidgetMetaInfo]'},
'web_service_inputs': {'key': 'webServiceInputs', 'type': '[WebServicePort]'},
'web_service_outputs': {'key': 'webServiceOutputs', 'type': '[WebServicePort]'},
'nodes_in_draft': {'key': 'nodesInDraft', 'type': '[str]'},
'name': {'key': 'name', 'type': 'str'},
'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
'pipeline_draft_mode': {'key': 'pipelineDraftMode', 'type': 'str'},
'graph_components_mode': {'key': 'graphComponentsMode', 'type': 'str'},
'sub_pipelines_info': {'key': 'subPipelinesInfo', 'type': 'SubPipelinesInfo'},
'flattened_sub_graphs': {'key': 'flattenedSubGraphs', 'type': '{PipelineSubDraft}'},
'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'},
'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'},
'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
'graph': {'key': 'graph', 'type': 'GraphDraftEntity'},
'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'},
'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'},
'tags': {'key': 'tags', 'type': '{str}'},
'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
'dataset_access_modes': {'key': 'datasetAccessModes', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword ui_widget_meta_infos:
:paramtype ui_widget_meta_infos: list[~flow.models.UIWidgetMetaInfo]
:keyword web_service_inputs:
:paramtype web_service_inputs: list[~flow.models.WebServicePort]
:keyword web_service_outputs:
:paramtype web_service_outputs: list[~flow.models.WebServicePort]
:keyword nodes_in_draft:
:paramtype nodes_in_draft: list[str]
:keyword name:
:paramtype name: str
:keyword pipeline_type: Possible values include: "TrainingPipeline",
"RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
:paramtype pipeline_type: str or ~flow.models.PipelineType
:keyword pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:paramtype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:keyword graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
"ContainsDesignerBuildin".
:paramtype graph_components_mode: str or ~flow.models.GraphComponentsMode
:keyword sub_pipelines_info:
:paramtype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:keyword flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
:paramtype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
:keyword pipeline_parameters: This is a dictionary.
:paramtype pipeline_parameters: dict[str, str]
:keyword data_path_assignments: This is a dictionary.
:paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:keyword data_set_definition_value_assignments: This is a dictionary.
:paramtype data_set_definition_value_assignments: dict[str,
~flow.models.DataSetDefinitionValue]
:keyword asset_output_settings_assignments: This is a dictionary.
:paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:keyword graph:
:paramtype graph: ~flow.models.GraphDraftEntity
:keyword pipeline_run_settings:
:paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:keyword module_node_run_settings:
:paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:keyword module_node_ui_input_settings:
:paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword continue_run_on_step_failure:
:paramtype continue_run_on_step_failure: bool
:keyword description:
:paramtype description: str
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword enforce_rerun:
:paramtype enforce_rerun: bool
:keyword dataset_access_modes: Possible values include: "Default", "DatasetInDpv2",
"AssetInDpv2", "DatasetInDesignerUI", "AssetInDesignerUI",
"DatasetInDpv2WithDatasetInDesignerUI", "AssetInDpv2WithDatasetInDesignerUI",
"AssetInDpv2WithAssetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI", "Dataset",
"Asset".
:paramtype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
super(SavePipelineDraftRequest, self).__init__(**kwargs)
self.ui_widget_meta_infos = kwargs.get('ui_widget_meta_infos', None)
self.web_service_inputs = kwargs.get('web_service_inputs', None)
self.web_service_outputs = kwargs.get('web_service_outputs', None)
self.nodes_in_draft = kwargs.get('nodes_in_draft', None)
self.name = kwargs.get('name', None)
self.pipeline_type = kwargs.get('pipeline_type', None)
self.pipeline_draft_mode = kwargs.get('pipeline_draft_mode', None)
self.graph_components_mode = kwargs.get('graph_components_mode', None)
self.sub_pipelines_info = kwargs.get('sub_pipelines_info', None)
self.flattened_sub_graphs = kwargs.get('flattened_sub_graphs', None)
self.pipeline_parameters = kwargs.get('pipeline_parameters', None)
self.data_path_assignments = kwargs.get('data_path_assignments', None)
self.data_set_definition_value_assignments = kwargs.get('data_set_definition_value_assignments', None)
self.asset_output_settings_assignments = kwargs.get('asset_output_settings_assignments', None)
self.graph = kwargs.get('graph', None)
self.pipeline_run_settings = kwargs.get('pipeline_run_settings', None)
self.module_node_run_settings = kwargs.get('module_node_run_settings', None)
self.module_node_ui_input_settings = kwargs.get('module_node_ui_input_settings', None)
self.tags = kwargs.get('tags', None)
self.continue_run_on_step_failure = kwargs.get('continue_run_on_step_failure', None)
self.description = kwargs.get('description', None)
self.properties = kwargs.get('properties', None)
self.enforce_rerun = kwargs.get('enforce_rerun', None)
self.dataset_access_modes = kwargs.get('dataset_access_modes', None)
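
# Illustrative sketch: a minimal draft-save request. Only a handful of the
# available fields are shown; the name, parameter, and tag values are
# hypothetical.
def _example_save_pipeline_draft_request():
    return SavePipelineDraftRequest(
        name="my-draft",
        pipeline_type="TrainingPipeline",
        pipeline_draft_mode="Normal",
        pipeline_parameters={"learning_rate": "0.01"},
        tags={"team": "forecasting"},
        continue_run_on_step_failure=False,
    )
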
class ScheduleBase(msrest.serialization.Model):
"""ScheduleBase.
:ivar schedule_status: Possible values include: "Enabled", "Disabled".
:vartype schedule_status: str or ~flow.models.MfeInternalScheduleStatus
:ivar schedule_type: Possible values include: "Cron", "Recurrence".
:vartype schedule_type: str or ~flow.models.ScheduleType
:ivar end_time:
:vartype end_time: ~datetime.datetime
:ivar start_time:
:vartype start_time: ~datetime.datetime
:ivar time_zone:
:vartype time_zone: str
:ivar expression:
:vartype expression: str
:ivar frequency: Possible values include: "Minute", "Hour", "Day", "Week", "Month".
:vartype frequency: str or ~flow.models.RecurrenceFrequency
:ivar interval:
:vartype interval: int
:ivar pattern:
:vartype pattern: ~flow.models.RecurrencePattern
"""
_attribute_map = {
'schedule_status': {'key': 'scheduleStatus', 'type': 'str'},
'schedule_type': {'key': 'scheduleType', 'type': 'str'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'time_zone': {'key': 'timeZone', 'type': 'str'},
'expression': {'key': 'expression', 'type': 'str'},
'frequency': {'key': 'frequency', 'type': 'str'},
'interval': {'key': 'interval', 'type': 'int'},
'pattern': {'key': 'pattern', 'type': 'RecurrencePattern'},
}
def __init__(
self,
**kwargs
):
"""
:keyword schedule_status: Possible values include: "Enabled", "Disabled".
:paramtype schedule_status: str or ~flow.models.MfeInternalScheduleStatus
:keyword schedule_type: Possible values include: "Cron", "Recurrence".
:paramtype schedule_type: str or ~flow.models.ScheduleType
:keyword end_time:
:paramtype end_time: ~datetime.datetime
:keyword start_time:
:paramtype start_time: ~datetime.datetime
:keyword time_zone:
:paramtype time_zone: str
:keyword expression:
:paramtype expression: str
:keyword frequency: Possible values include: "Minute", "Hour", "Day", "Week", "Month".
:paramtype frequency: str or ~flow.models.RecurrenceFrequency
:keyword interval:
:paramtype interval: int
:keyword pattern:
:paramtype pattern: ~flow.models.RecurrencePattern
"""
super(ScheduleBase, self).__init__(**kwargs)
self.schedule_status = kwargs.get('schedule_status', None)
self.schedule_type = kwargs.get('schedule_type', None)
self.end_time = kwargs.get('end_time', None)
self.start_time = kwargs.get('start_time', None)
self.time_zone = kwargs.get('time_zone', None)
self.expression = kwargs.get('expression', None)
self.frequency = kwargs.get('frequency', None)
self.interval = kwargs.get('interval', None)
self.pattern = kwargs.get('pattern', None)
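
# Illustrative sketch: ScheduleBase carries both cron fields (expression) and
# recurrence fields (frequency/interval/pattern); which subset applies is
# selected by schedule_type. The cron expression and start time below are
# hypothetical.
def _example_cron_schedule():
    import datetime
    return ScheduleBase(
        schedule_status="Enabled",
        schedule_type="Cron",
        expression="0 9 * * 1-5",          # hypothetical cron: weekdays at 09:00
        time_zone="UTC",
        start_time=datetime.datetime(2024, 1, 1),
    )
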
class SchemaContractsCreatedBy(msrest.serialization.Model):
"""SchemaContractsCreatedBy.
:ivar user_object_id:
:vartype user_object_id: str
:ivar user_tenant_id:
:vartype user_tenant_id: str
:ivar user_name:
:vartype user_name: str
:ivar user_principal_name:
:vartype user_principal_name: str
"""
_attribute_map = {
'user_object_id': {'key': 'userObjectId', 'type': 'str'},
'user_tenant_id': {'key': 'userTenantId', 'type': 'str'},
'user_name': {'key': 'userName', 'type': 'str'},
'user_principal_name': {'key': 'userPrincipalName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword user_object_id:
:paramtype user_object_id: str
:keyword user_tenant_id:
:paramtype user_tenant_id: str
:keyword user_name:
:paramtype user_name: str
:keyword user_principal_name:
:paramtype user_principal_name: str
"""
super(SchemaContractsCreatedBy, self).__init__(**kwargs)
self.user_object_id = kwargs.get('user_object_id', None)
self.user_tenant_id = kwargs.get('user_tenant_id', None)
self.user_name = kwargs.get('user_name', None)
self.user_principal_name = kwargs.get('user_principal_name', None)
class ScopeCloudConfiguration(msrest.serialization.Model):
"""ScopeCloudConfiguration.
:ivar input_path_suffixes: This is a dictionary.
:vartype input_path_suffixes: dict[str, ~flow.models.ArgumentAssignment]
:ivar output_path_suffixes: This is a dictionary.
:vartype output_path_suffixes: dict[str, ~flow.models.ArgumentAssignment]
:ivar user_alias:
:vartype user_alias: str
:ivar tokens:
:vartype tokens: int
:ivar auto_token:
:vartype auto_token: int
:ivar vcp:
:vartype vcp: float
"""
_attribute_map = {
'input_path_suffixes': {'key': 'inputPathSuffixes', 'type': '{ArgumentAssignment}'},
'output_path_suffixes': {'key': 'outputPathSuffixes', 'type': '{ArgumentAssignment}'},
'user_alias': {'key': 'userAlias', 'type': 'str'},
'tokens': {'key': 'tokens', 'type': 'int'},
'auto_token': {'key': 'autoToken', 'type': 'int'},
'vcp': {'key': 'vcp', 'type': 'float'},
}
def __init__(
self,
**kwargs
):
"""
:keyword input_path_suffixes: This is a dictionary.
:paramtype input_path_suffixes: dict[str, ~flow.models.ArgumentAssignment]
:keyword output_path_suffixes: This is a dictionary.
:paramtype output_path_suffixes: dict[str, ~flow.models.ArgumentAssignment]
:keyword user_alias:
:paramtype user_alias: str
:keyword tokens:
:paramtype tokens: int
:keyword auto_token:
:paramtype auto_token: int
:keyword vcp:
:paramtype vcp: float
"""
super(ScopeCloudConfiguration, self).__init__(**kwargs)
self.input_path_suffixes = kwargs.get('input_path_suffixes', None)
self.output_path_suffixes = kwargs.get('output_path_suffixes', None)
self.user_alias = kwargs.get('user_alias', None)
self.tokens = kwargs.get('tokens', None)
self.auto_token = kwargs.get('auto_token', None)
self.vcp = kwargs.get('vcp', None)
class Seasonality(msrest.serialization.Model):
"""Seasonality.
:ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.SeasonalityMode
:ivar value:
:vartype value: int
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'value': {'key': 'value', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom".
:paramtype mode: str or ~flow.models.SeasonalityMode
:keyword value:
:paramtype value: int
"""
super(Seasonality, self).__init__(**kwargs)
self.mode = kwargs.get('mode', None)
self.value = kwargs.get('value', None)
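
# Illustrative sketch: presumably 'value' is only consulted when mode is
# "Custom", while "Auto" lets the service infer the seasonality. The value
# below is hypothetical.
def _example_seasonality():
    return Seasonality(mode="Custom", value=12)   # e.g. yearly seasonality on monthly data
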
class SecretConfiguration(msrest.serialization.Model):
"""SecretConfiguration.
:ivar workspace_secret_name:
:vartype workspace_secret_name: str
:ivar uri:
:vartype uri: str
"""
_attribute_map = {
'workspace_secret_name': {'key': 'workspace_secret_name', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword workspace_secret_name:
:paramtype workspace_secret_name: str
:keyword uri:
:paramtype uri: str
"""
super(SecretConfiguration, self).__init__(**kwargs)
self.workspace_secret_name = kwargs.get('workspace_secret_name', None)
self.uri = kwargs.get('uri', None)
class SegmentedResult1(msrest.serialization.Model):
"""SegmentedResult1.
:ivar value:
:vartype value: list[~flow.models.FlowIndexEntity]
:ivar continuation_token:
:vartype continuation_token: str
:ivar count:
:vartype count: int
:ivar next_link:
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[FlowIndexEntity]'},
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'count': {'key': 'count', 'type': 'int'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword value:
:paramtype value: list[~flow.models.FlowIndexEntity]
:keyword continuation_token:
:paramtype continuation_token: str
:keyword count:
:paramtype count: int
:keyword next_link:
:paramtype next_link: str
"""
super(SegmentedResult1, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.continuation_token = kwargs.get('continuation_token', None)
self.count = kwargs.get('count', None)
self.next_link = kwargs.get('next_link', None)
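
# Illustrative sketch: SegmentedResult1 is a paged result, so a caller would
# keep requesting pages until continuation_token (or next_link) comes back
# empty. The fetch_page callable below is hypothetical and stands in for
# whatever client operation returns a SegmentedResult1.
def _example_drain_segmented_result(fetch_page):
    items, token = [], None
    while True:
        page = fetch_page(continuation_token=token)   # hypothetical call
        items.extend(page.value or [])
        token = page.continuation_token
        if not token:
            return items
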
class ServiceLogRequest(msrest.serialization.Model):
"""ServiceLogRequest.
:ivar log_level: Possible values include: "Trace", "Debug", "Information", "Warning", "Error",
"Critical", "None".
:vartype log_level: str or ~flow.models.LogLevel
:ivar message:
:vartype message: str
:ivar timestamp:
:vartype timestamp: ~datetime.datetime
"""
_attribute_map = {
'log_level': {'key': 'logLevel', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword log_level: Possible values include: "Trace", "Debug", "Information", "Warning",
"Error", "Critical", "None".
:paramtype log_level: str or ~flow.models.LogLevel
:keyword message:
:paramtype message: str
:keyword timestamp:
:paramtype timestamp: ~datetime.datetime
"""
super(ServiceLogRequest, self).__init__(**kwargs)
self.log_level = kwargs.get('log_level', None)
self.message = kwargs.get('message', None)
self.timestamp = kwargs.get('timestamp', None)
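
# Illustrative sketch: a single client-side log record. The timestamp is
# serialized as ISO-8601 per the attribute map; the message text is
# hypothetical.
def _example_service_log_request():
    import datetime
    return ServiceLogRequest(
        log_level="Information",
        message="Flow session started.",
        timestamp=datetime.datetime.now(datetime.timezone.utc),
    )
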
class SessionApplication(msrest.serialization.Model):
"""SessionApplication.
:ivar name:
:vartype name: str
:ivar type:
:vartype type: str
:ivar image:
:vartype image: str
:ivar env_vars: Dictionary of :code:`<string>`.
:vartype env_vars: dict[str, str]
:ivar python_pip_requirements:
:vartype python_pip_requirements: list[str]
:ivar volumes:
:vartype volumes: list[~flow.models.Volume]
:ivar setup_results:
:vartype setup_results: list[~flow.models.SessionApplicationRunCommandResult]
:ivar port:
:vartype port: int
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'image': {'key': 'image', 'type': 'str'},
'env_vars': {'key': 'envVars', 'type': '{str}'},
'python_pip_requirements': {'key': 'pythonPipRequirements', 'type': '[str]'},
'volumes': {'key': 'volumes', 'type': '[Volume]'},
'setup_results': {'key': 'setupResults', 'type': '[SessionApplicationRunCommandResult]'},
'port': {'key': 'port', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type:
:paramtype type: str
:keyword image:
:paramtype image: str
:keyword env_vars: Dictionary of :code:`<string>`.
:paramtype env_vars: dict[str, str]
:keyword python_pip_requirements:
:paramtype python_pip_requirements: list[str]
:keyword volumes:
:paramtype volumes: list[~flow.models.Volume]
:keyword setup_results:
:paramtype setup_results: list[~flow.models.SessionApplicationRunCommandResult]
:keyword port:
:paramtype port: int
"""
super(SessionApplication, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = kwargs.get('type', None)
self.image = kwargs.get('image', None)
self.env_vars = kwargs.get('env_vars', None)
self.python_pip_requirements = kwargs.get('python_pip_requirements', None)
self.volumes = kwargs.get('volumes', None)
self.setup_results = kwargs.get('setup_results', None)
self.port = kwargs.get('port', None)
class SessionApplicationRunCommandResult(msrest.serialization.Model):
"""SessionApplicationRunCommandResult.
:ivar command:
:vartype command: str
:ivar arguments:
:vartype arguments: list[str]
:ivar exit_code:
:vartype exit_code: int
:ivar std_out:
:vartype std_out: str
:ivar std_err:
:vartype std_err: str
"""
_attribute_map = {
'command': {'key': 'command', 'type': 'str'},
'arguments': {'key': 'arguments', 'type': '[str]'},
'exit_code': {'key': 'exitCode', 'type': 'int'},
'std_out': {'key': 'stdOut', 'type': 'str'},
'std_err': {'key': 'stdErr', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword command:
:paramtype command: str
:keyword arguments:
:paramtype arguments: list[str]
:keyword exit_code:
:paramtype exit_code: int
:keyword std_out:
:paramtype std_out: str
:keyword std_err:
:paramtype std_err: str
"""
super(SessionApplicationRunCommandResult, self).__init__(**kwargs)
self.command = kwargs.get('command', None)
self.arguments = kwargs.get('arguments', None)
self.exit_code = kwargs.get('exit_code', None)
self.std_out = kwargs.get('std_out', None)
self.std_err = kwargs.get('std_err', None)
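
# Illustrative sketch: a session application with the outcome of one setup
# command recorded in setup_results. The image name, package list, and
# command are hypothetical.
def _example_session_application():
    return SessionApplication(
        name="runtime",
        type="python",
        image="example.azurecr.io/session:latest",
        env_vars={"LOG_LEVEL": "info"},
        python_pip_requirements=["requests==2.31.0"],
        port=8080,
        setup_results=[
            SessionApplicationRunCommandResult(
                command="pip",
                arguments=["install", "-r", "requirements.txt"],
                exit_code=0,
                std_out="Successfully installed requests-2.31.0",
            ),
        ],
    )
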
class SessionProperties(msrest.serialization.Model):
"""SessionProperties.
:ivar session_id:
:vartype session_id: str
:ivar subscription_id:
:vartype subscription_id: str
:ivar resource_group_name:
:vartype resource_group_name: str
:ivar workspace_name:
:vartype workspace_name: str
:ivar existing_user_compute_instance_name:
:vartype existing_user_compute_instance_name: str
:ivar user_object_id:
:vartype user_object_id: str
:ivar user_tenant_id:
:vartype user_tenant_id: str
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar applications:
:vartype applications: list[~flow.models.SessionApplication]
:ivar application:
:vartype application: ~flow.models.SessionApplication
:ivar last_alive_time:
:vartype last_alive_time: ~datetime.datetime
"""
_attribute_map = {
'session_id': {'key': 'sessionId', 'type': 'str'},
'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
'resource_group_name': {'key': 'resourceGroupName', 'type': 'str'},
'workspace_name': {'key': 'workspaceName', 'type': 'str'},
'existing_user_compute_instance_name': {'key': 'existingUserComputeInstanceName', 'type': 'str'},
'user_object_id': {'key': 'userObjectId', 'type': 'str'},
'user_tenant_id': {'key': 'userTenantId', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'applications': {'key': 'applications', 'type': '[SessionApplication]'},
'application': {'key': 'application', 'type': 'SessionApplication'},
'last_alive_time': {'key': 'lastAliveTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword session_id:
:paramtype session_id: str
:keyword subscription_id:
:paramtype subscription_id: str
:keyword resource_group_name:
:paramtype resource_group_name: str
:keyword workspace_name:
:paramtype workspace_name: str
:keyword existing_user_compute_instance_name:
:paramtype existing_user_compute_instance_name: str
:keyword user_object_id:
:paramtype user_object_id: str
:keyword user_tenant_id:
:paramtype user_tenant_id: str
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword applications:
:paramtype applications: list[~flow.models.SessionApplication]
:keyword application:
:paramtype application: ~flow.models.SessionApplication
:keyword last_alive_time:
:paramtype last_alive_time: ~datetime.datetime
"""
super(SessionProperties, self).__init__(**kwargs)
self.session_id = kwargs.get('session_id', None)
self.subscription_id = kwargs.get('subscription_id', None)
self.resource_group_name = kwargs.get('resource_group_name', None)
self.workspace_name = kwargs.get('workspace_name', None)
self.existing_user_compute_instance_name = kwargs.get('existing_user_compute_instance_name', None)
self.user_object_id = kwargs.get('user_object_id', None)
self.user_tenant_id = kwargs.get('user_tenant_id', None)
self.vm_size = kwargs.get('vm_size', None)
self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
self.applications = kwargs.get('applications', None)
self.application = kwargs.get('application', None)
self.last_alive_time = kwargs.get('last_alive_time', None)
class SetupFlowSessionRequest(msrest.serialization.Model):
"""SetupFlowSessionRequest.
:ivar action: Possible values include: "Install", "Reset", "Update", "Delete".
:vartype action: str or ~flow.models.SetupFlowSessionAction
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar identity:
:vartype identity: str
:ivar compute_name:
:vartype compute_name: str
"""
_attribute_map = {
'action': {'key': 'action', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'identity': {'key': 'identity', 'type': 'str'},
'compute_name': {'key': 'computeName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword action: Possible values include: "Install", "Reset", "Update", "Delete".
:paramtype action: str or ~flow.models.SetupFlowSessionAction
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword identity:
:paramtype identity: str
:keyword compute_name:
:paramtype compute_name: str
"""
super(SetupFlowSessionRequest, self).__init__(**kwargs)
self.action = kwargs.get('action', None)
self.vm_size = kwargs.get('vm_size', None)
self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
self.identity = kwargs.get('identity', None)
self.compute_name = kwargs.get('compute_name', None)
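
# Illustrative sketch: requesting installation of a flow session on a given
# VM size with a one-hour idle timeout. The VM size and compute name are
# hypothetical.
def _example_setup_flow_session_request():
    return SetupFlowSessionRequest(
        action="Install",
        vm_size="Standard_DS3_v2",
        max_idle_time_seconds=3600,
        compute_name="my-compute-instance",
    )
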
class SharingScope(msrest.serialization.Model):
"""SharingScope.
:ivar type: Possible values include: "Global", "Tenant", "Subscription", "ResourceGroup",
"Workspace".
:vartype type: str or ~flow.models.ScopeType
:ivar identifier:
:vartype identifier: str
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'identifier': {'key': 'identifier', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: Possible values include: "Global", "Tenant", "Subscription", "ResourceGroup",
"Workspace".
:paramtype type: str or ~flow.models.ScopeType
:keyword identifier:
:paramtype identifier: str
"""
super(SharingScope, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.identifier = kwargs.get('identifier', None)
class Snapshot(msrest.serialization.Model):
"""Snapshot.
:ivar id:
:vartype id: str
:ivar directory_name:
:vartype directory_name: str
:ivar snapshot_asset_id:
:vartype snapshot_asset_id: str
:ivar snapshot_entity_id:
:vartype snapshot_entity_id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'directory_name': {'key': 'directoryName', 'type': 'str'},
'snapshot_asset_id': {'key': 'snapshotAssetId', 'type': 'str'},
'snapshot_entity_id': {'key': 'snapshotEntityId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword directory_name:
:paramtype directory_name: str
:keyword snapshot_asset_id:
:paramtype snapshot_asset_id: str
:keyword snapshot_entity_id:
:paramtype snapshot_entity_id: str
"""
super(Snapshot, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.directory_name = kwargs.get('directory_name', None)
self.snapshot_asset_id = kwargs.get('snapshot_asset_id', None)
self.snapshot_entity_id = kwargs.get('snapshot_entity_id', None)
class SnapshotInfo(msrest.serialization.Model):
"""SnapshotInfo.
:ivar root_download_url:
:vartype root_download_url: str
:ivar snapshots: This is a dictionary.
:vartype snapshots: dict[str, ~flow.models.DownloadResourceInfo]
"""
_attribute_map = {
'root_download_url': {'key': 'rootDownloadUrl', 'type': 'str'},
'snapshots': {'key': 'snapshots', 'type': '{DownloadResourceInfo}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword root_download_url:
:paramtype root_download_url: str
:keyword snapshots: This is a dictionary.
:paramtype snapshots: dict[str, ~flow.models.DownloadResourceInfo]
"""
super(SnapshotInfo, self).__init__(**kwargs)
self.root_download_url = kwargs.get('root_download_url', None)
self.snapshots = kwargs.get('snapshots', None)
class SourceCodeDataReference(msrest.serialization.Model):
"""SourceCodeDataReference.
:ivar data_store_name:
:vartype data_store_name: str
:ivar path:
:vartype path: str
"""
_attribute_map = {
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'path': {'key': 'path', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_store_name:
:paramtype data_store_name: str
:keyword path:
:paramtype path: str
"""
super(SourceCodeDataReference, self).__init__(**kwargs)
self.data_store_name = kwargs.get('data_store_name', None)
self.path = kwargs.get('path', None)
class SparkConfiguration(msrest.serialization.Model):
"""SparkConfiguration.
:ivar configuration: Dictionary of :code:`<string>`.
:vartype configuration: dict[str, str]
:ivar files:
:vartype files: list[str]
:ivar archives:
:vartype archives: list[str]
:ivar jars:
:vartype jars: list[str]
:ivar py_files:
:vartype py_files: list[str]
:ivar spark_pool_resource_id:
:vartype spark_pool_resource_id: str
"""
_attribute_map = {
'configuration': {'key': 'configuration', 'type': '{str}'},
'files': {'key': 'files', 'type': '[str]'},
'archives': {'key': 'archives', 'type': '[str]'},
'jars': {'key': 'jars', 'type': '[str]'},
'py_files': {'key': 'pyFiles', 'type': '[str]'},
'spark_pool_resource_id': {'key': 'sparkPoolResourceId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword configuration: Dictionary of :code:`<string>`.
:paramtype configuration: dict[str, str]
:keyword files:
:paramtype files: list[str]
:keyword archives:
:paramtype archives: list[str]
:keyword jars:
:paramtype jars: list[str]
:keyword py_files:
:paramtype py_files: list[str]
:keyword spark_pool_resource_id:
:paramtype spark_pool_resource_id: str
"""
super(SparkConfiguration, self).__init__(**kwargs)
self.configuration = kwargs.get('configuration', None)
self.files = kwargs.get('files', None)
self.archives = kwargs.get('archives', None)
self.jars = kwargs.get('jars', None)
self.py_files = kwargs.get('py_files', None)
self.spark_pool_resource_id = kwargs.get('spark_pool_resource_id', None)
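
# Illustrative sketch: Spark settings as they might ride on a job payload.
# The configuration keys follow standard Spark property names; the file name
# and abbreviated pool resource ID are hypothetical.
def _example_spark_configuration():
    return SparkConfiguration(
        configuration={
            "spark.driver.memory": "4g",
            "spark.executor.instances": "2",
        },
        py_files=["helpers.zip"],
        spark_pool_resource_id="/subscriptions/.../bigDataPools/pool1",  # hypothetical
    )
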
class SparkJarTaskDto(msrest.serialization.Model):
"""SparkJarTaskDto.
:ivar main_class_name:
:vartype main_class_name: str
:ivar parameters:
:vartype parameters: list[str]
"""
_attribute_map = {
'main_class_name': {'key': 'main_class_name', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword main_class_name:
:paramtype main_class_name: str
:keyword parameters:
:paramtype parameters: list[str]
"""
super(SparkJarTaskDto, self).__init__(**kwargs)
self.main_class_name = kwargs.get('main_class_name', None)
self.parameters = kwargs.get('parameters', None)
class SparkJob(msrest.serialization.Model):
"""SparkJob.
:ivar job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data",
"AutoML", "Spark", "Base".
:vartype job_type: str or ~flow.models.JobType
:ivar resources:
:vartype resources: ~flow.models.SparkResourceConfiguration
:ivar args:
:vartype args: str
:ivar code_id:
:vartype code_id: str
:ivar entry:
:vartype entry: ~flow.models.SparkJobEntry
:ivar py_files:
:vartype py_files: list[str]
:ivar jars:
:vartype jars: list[str]
:ivar files:
:vartype files: list[str]
:ivar archives:
:vartype archives: list[str]
:ivar environment_id:
:vartype environment_id: str
:ivar input_data_bindings: Dictionary of :code:`<InputDataBinding>`.
:vartype input_data_bindings: dict[str, ~flow.models.InputDataBinding]
:ivar output_data_bindings: Dictionary of :code:`<OutputDataBinding>`.
:vartype output_data_bindings: dict[str, ~flow.models.OutputDataBinding]
:ivar conf: Dictionary of :code:`<string>`.
:vartype conf: dict[str, str]
:ivar environment_variables: Dictionary of :code:`<string>`.
:vartype environment_variables: dict[str, str]
:ivar provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
"InProgress".
:vartype provisioning_state: str or ~flow.models.JobProvisioningState
:ivar parent_job_name:
:vartype parent_job_name: str
:ivar display_name:
:vartype display_name: str
:ivar experiment_name:
:vartype experiment_name: str
:ivar status: Possible values include: "NotStarted", "Starting", "Provisioning", "Preparing",
"Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled",
"NotResponding", "Paused", "Unknown", "Scheduled".
:vartype status: str or ~flow.models.JobStatus
:ivar interaction_endpoints: Dictionary of :code:`<JobEndpoint>`.
:vartype interaction_endpoints: dict[str, ~flow.models.JobEndpoint]
:ivar identity:
:vartype identity: ~flow.models.MfeInternalIdentityConfiguration
:ivar compute:
:vartype compute: ~flow.models.ComputeConfiguration
:ivar priority:
:vartype priority: int
:ivar output:
:vartype output: ~flow.models.JobOutputArtifacts
:ivar is_archived:
:vartype is_archived: bool
:ivar schedule:
:vartype schedule: ~flow.models.ScheduleBase
:ivar component_id:
:vartype component_id: str
:ivar notification_setting:
:vartype notification_setting: ~flow.models.NotificationSetting
:ivar secrets_configuration: Dictionary of :code:`<MfeInternalSecretConfiguration>`.
:vartype secrets_configuration: dict[str, ~flow.models.MfeInternalSecretConfiguration]
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
"""
_attribute_map = {
'job_type': {'key': 'jobType', 'type': 'str'},
'resources': {'key': 'resources', 'type': 'SparkResourceConfiguration'},
'args': {'key': 'args', 'type': 'str'},
'code_id': {'key': 'codeId', 'type': 'str'},
'entry': {'key': 'entry', 'type': 'SparkJobEntry'},
'py_files': {'key': 'pyFiles', 'type': '[str]'},
'jars': {'key': 'jars', 'type': '[str]'},
'files': {'key': 'files', 'type': '[str]'},
'archives': {'key': 'archives', 'type': '[str]'},
'environment_id': {'key': 'environmentId', 'type': 'str'},
'input_data_bindings': {'key': 'inputDataBindings', 'type': '{InputDataBinding}'},
'output_data_bindings': {'key': 'outputDataBindings', 'type': '{OutputDataBinding}'},
'conf': {'key': 'conf', 'type': '{str}'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'parent_job_name': {'key': 'parentJobName', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'},
'identity': {'key': 'identity', 'type': 'MfeInternalIdentityConfiguration'},
'compute': {'key': 'compute', 'type': 'ComputeConfiguration'},
'priority': {'key': 'priority', 'type': 'int'},
'output': {'key': 'output', 'type': 'JobOutputArtifacts'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
'schedule': {'key': 'schedule', 'type': 'ScheduleBase'},
'component_id': {'key': 'componentId', 'type': 'str'},
'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'},
'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{MfeInternalSecretConfiguration}'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data",
"AutoML", "Spark", "Base".
:paramtype job_type: str or ~flow.models.JobType
:keyword resources:
:paramtype resources: ~flow.models.SparkResourceConfiguration
:keyword args:
:paramtype args: str
:keyword code_id:
:paramtype code_id: str
:keyword entry:
:paramtype entry: ~flow.models.SparkJobEntry
:keyword py_files:
:paramtype py_files: list[str]
:keyword jars:
:paramtype jars: list[str]
:keyword files:
:paramtype files: list[str]
:keyword archives:
:paramtype archives: list[str]
:keyword environment_id:
:paramtype environment_id: str
:keyword input_data_bindings: Dictionary of :code:`<InputDataBinding>`.
:paramtype input_data_bindings: dict[str, ~flow.models.InputDataBinding]
:keyword output_data_bindings: Dictionary of :code:`<OutputDataBinding>`.
:paramtype output_data_bindings: dict[str, ~flow.models.OutputDataBinding]
:keyword conf: Dictionary of :code:`<string>`.
:paramtype conf: dict[str, str]
:keyword environment_variables: Dictionary of :code:`<string>`.
:paramtype environment_variables: dict[str, str]
:keyword provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
"InProgress".
:paramtype provisioning_state: str or ~flow.models.JobProvisioningState
:keyword parent_job_name:
:paramtype parent_job_name: str
:keyword display_name:
:paramtype display_name: str
:keyword experiment_name:
:paramtype experiment_name: str
:keyword status: Possible values include: "NotStarted", "Starting", "Provisioning",
"Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed",
"Canceled", "NotResponding", "Paused", "Unknown", "Scheduled".
:paramtype status: str or ~flow.models.JobStatus
:keyword interaction_endpoints: Dictionary of :code:`<JobEndpoint>`.
:paramtype interaction_endpoints: dict[str, ~flow.models.JobEndpoint]
:keyword identity:
:paramtype identity: ~flow.models.MfeInternalIdentityConfiguration
:keyword compute:
:paramtype compute: ~flow.models.ComputeConfiguration
:keyword priority:
:paramtype priority: int
:keyword output:
:paramtype output: ~flow.models.JobOutputArtifacts
:keyword is_archived:
:paramtype is_archived: bool
:keyword schedule:
:paramtype schedule: ~flow.models.ScheduleBase
:keyword component_id:
:paramtype component_id: str
:keyword notification_setting:
:paramtype notification_setting: ~flow.models.NotificationSetting
:keyword secrets_configuration: Dictionary of :code:`<MfeInternalSecretConfiguration>`.
:paramtype secrets_configuration: dict[str, ~flow.models.MfeInternalSecretConfiguration]
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
"""
super(SparkJob, self).__init__(**kwargs)
self.job_type = kwargs.get('job_type', None)
self.resources = kwargs.get('resources', None)
self.args = kwargs.get('args', None)
self.code_id = kwargs.get('code_id', None)
self.entry = kwargs.get('entry', None)
self.py_files = kwargs.get('py_files', None)
self.jars = kwargs.get('jars', None)
self.files = kwargs.get('files', None)
self.archives = kwargs.get('archives', None)
self.environment_id = kwargs.get('environment_id', None)
self.input_data_bindings = kwargs.get('input_data_bindings', None)
self.output_data_bindings = kwargs.get('output_data_bindings', None)
self.conf = kwargs.get('conf', None)
self.environment_variables = kwargs.get('environment_variables', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
self.parent_job_name = kwargs.get('parent_job_name', None)
self.display_name = kwargs.get('display_name', None)
self.experiment_name = kwargs.get('experiment_name', None)
self.status = kwargs.get('status', None)
self.interaction_endpoints = kwargs.get('interaction_endpoints', None)
self.identity = kwargs.get('identity', None)
self.compute = kwargs.get('compute', None)
self.priority = kwargs.get('priority', None)
self.output = kwargs.get('output', None)
self.is_archived = kwargs.get('is_archived', None)
self.schedule = kwargs.get('schedule', None)
self.component_id = kwargs.get('component_id', None)
self.notification_setting = kwargs.get('notification_setting', None)
self.secrets_configuration = kwargs.get('secrets_configuration', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
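

# The SparkJob model above is a plain kwargs-driven msrest model; nothing below is
# required by the generated code. This is a minimal, illustrative sketch of how a
# SparkJob could be assembled and serialized into the camelCase wire format declared
# in _attribute_map. All literal values (instance type, file name, conf keys) are
# hypothetical placeholders, not values mandated by the service.
def _example_spark_job():
    # SparkJobEntry and SparkResourceConfiguration are defined later in this module;
    # resolving them inside a function body keeps this sketch import-safe.
    job = SparkJob(
        job_type='Spark',
        entry=SparkJobEntry(file='main.py'),
        resources=SparkResourceConfiguration(instance_type='Standard_E4S_V3',
                                             runtime_version='3.3'),
        conf={'spark.executor.instances': '2'},
        experiment_name='example-experiment',
    )
    body = job.serialize()  # keys follow _attribute_map, e.g. 'experimentName'
    assert body['experimentName'] == 'example-experiment'
    return body
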
class SparkJobEntry(msrest.serialization.Model):
"""SparkJobEntry.
:ivar file:
:vartype file: str
:ivar class_name:
:vartype class_name: str
"""
_attribute_map = {
'file': {'key': 'file', 'type': 'str'},
'class_name': {'key': 'className', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword file:
:paramtype file: str
:keyword class_name:
:paramtype class_name: str
"""
super(SparkJobEntry, self).__init__(**kwargs)
self.file = kwargs.get('file', None)
self.class_name = kwargs.get('class_name', None)
class SparkMavenPackage(msrest.serialization.Model):
"""SparkMavenPackage.
:ivar group:
:vartype group: str
:ivar artifact:
:vartype artifact: str
:ivar version:
:vartype version: str
"""
_attribute_map = {
'group': {'key': 'group', 'type': 'str'},
'artifact': {'key': 'artifact', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword group:
:paramtype group: str
:keyword artifact:
:paramtype artifact: str
:keyword version:
:paramtype version: str
"""
super(SparkMavenPackage, self).__init__(**kwargs)
self.group = kwargs.get('group', None)
self.artifact = kwargs.get('artifact', None)
self.version = kwargs.get('version', None)
class SparkPythonTaskDto(msrest.serialization.Model):
"""SparkPythonTaskDto.
:ivar python_file:
:vartype python_file: str
:ivar parameters:
:vartype parameters: list[str]
"""
_attribute_map = {
'python_file': {'key': 'python_file', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword python_file:
:paramtype python_file: str
:keyword parameters:
:paramtype parameters: list[str]
"""
super(SparkPythonTaskDto, self).__init__(**kwargs)
self.python_file = kwargs.get('python_file', None)
self.parameters = kwargs.get('parameters', None)
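

# Note that SparkPythonTaskDto's wire keys ('python_file', 'parameters') are snake_case,
# unlike the camelCase keys used by most models in this module; serialization always
# follows _attribute_map literally. A small illustrative check (values are placeholders):
def _example_spark_python_task():
    task = SparkPythonTaskDto(python_file='dbfs:/scripts/train.py',
                              parameters=['--epochs', '10'])
    assert task.serialize() == {'python_file': 'dbfs:/scripts/train.py',
                                'parameters': ['--epochs', '10']}
    return task
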
class SparkResourceConfiguration(msrest.serialization.Model):
"""SparkResourceConfiguration.
:ivar instance_type:
:vartype instance_type: str
:ivar runtime_version:
:vartype runtime_version: str
"""
_attribute_map = {
'instance_type': {'key': 'instanceType', 'type': 'str'},
'runtime_version': {'key': 'runtimeVersion', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword instance_type:
:paramtype instance_type: str
:keyword runtime_version:
:paramtype runtime_version: str
"""
super(SparkResourceConfiguration, self).__init__(**kwargs)
self.instance_type = kwargs.get('instance_type', None)
self.runtime_version = kwargs.get('runtime_version', None)
class SparkSection(msrest.serialization.Model):
"""SparkSection.
:ivar repositories:
:vartype repositories: list[str]
:ivar packages:
:vartype packages: list[~flow.models.SparkMavenPackage]
:ivar precache_packages:
:vartype precache_packages: bool
"""
_attribute_map = {
'repositories': {'key': 'repositories', 'type': '[str]'},
'packages': {'key': 'packages', 'type': '[SparkMavenPackage]'},
'precache_packages': {'key': 'precachePackages', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword repositories:
:paramtype repositories: list[str]
:keyword packages:
:paramtype packages: list[~flow.models.SparkMavenPackage]
:keyword precache_packages:
:paramtype precache_packages: bool
"""
super(SparkSection, self).__init__(**kwargs)
self.repositories = kwargs.get('repositories', None)
self.packages = kwargs.get('packages', None)
self.precache_packages = kwargs.get('precache_packages', None)
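

# A SparkSection nests a list of SparkMavenPackage models; msrest serializes the
# '[SparkMavenPackage]' type declared in _attribute_map element by element. Sketch
# only, with a made-up Maven coordinate:
def _example_spark_section():
    section = SparkSection(
        repositories=['https://repo1.maven.org/maven2'],
        packages=[SparkMavenPackage(group='com.microsoft.azure',
                                    artifact='azure-eventhubs-spark_2.12',
                                    version='2.3.22')],
        precache_packages=True,
    )
    body = section.serialize()
    assert body['packages'][0]['artifact'] == 'azure-eventhubs-spark_2.12'
    return body
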
class SparkSubmitTaskDto(msrest.serialization.Model):
"""SparkSubmitTaskDto.
:ivar parameters:
:vartype parameters: list[str]
"""
_attribute_map = {
'parameters': {'key': 'parameters', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword parameters:
:paramtype parameters: list[str]
"""
super(SparkSubmitTaskDto, self).__init__(**kwargs)
self.parameters = kwargs.get('parameters', None)
class SqlDataPath(msrest.serialization.Model):
"""SqlDataPath.
:ivar sql_table_name:
:vartype sql_table_name: str
:ivar sql_query:
:vartype sql_query: str
:ivar sql_stored_procedure_name:
:vartype sql_stored_procedure_name: str
:ivar sql_stored_procedure_params:
:vartype sql_stored_procedure_params: list[~flow.models.StoredProcedureParameter]
"""
_attribute_map = {
'sql_table_name': {'key': 'sqlTableName', 'type': 'str'},
'sql_query': {'key': 'sqlQuery', 'type': 'str'},
'sql_stored_procedure_name': {'key': 'sqlStoredProcedureName', 'type': 'str'},
'sql_stored_procedure_params': {'key': 'sqlStoredProcedureParams', 'type': '[StoredProcedureParameter]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword sql_table_name:
:paramtype sql_table_name: str
:keyword sql_query:
:paramtype sql_query: str
:keyword sql_stored_procedure_name:
:paramtype sql_stored_procedure_name: str
:keyword sql_stored_procedure_params:
:paramtype sql_stored_procedure_params: list[~flow.models.StoredProcedureParameter]
"""
super(SqlDataPath, self).__init__(**kwargs)
self.sql_table_name = kwargs.get('sql_table_name', None)
self.sql_query = kwargs.get('sql_query', None)
self.sql_stored_procedure_name = kwargs.get('sql_stored_procedure_name', None)
self.sql_stored_procedure_params = kwargs.get('sql_stored_procedure_params', None)
class StackEnsembleSettings(msrest.serialization.Model):
"""StackEnsembleSettings.
:ivar stack_meta_learner_type: Possible values include: "None", "LogisticRegression",
"LogisticRegressionCV", "LightGBMClassifier", "ElasticNet", "ElasticNetCV",
"LightGBMRegressor", "LinearRegression".
:vartype stack_meta_learner_type: str or ~flow.models.StackMetaLearnerType
:ivar stack_meta_learner_train_percentage:
:vartype stack_meta_learner_train_percentage: float
:ivar stack_meta_learner_k_wargs: Anything.
:vartype stack_meta_learner_k_wargs: any
"""
_attribute_map = {
'stack_meta_learner_type': {'key': 'stackMetaLearnerType', 'type': 'str'},
'stack_meta_learner_train_percentage': {'key': 'stackMetaLearnerTrainPercentage', 'type': 'float'},
'stack_meta_learner_k_wargs': {'key': 'stackMetaLearnerKWargs', 'type': 'object'},
}
def __init__(
self,
**kwargs
):
"""
:keyword stack_meta_learner_type: Possible values include: "None", "LogisticRegression",
"LogisticRegressionCV", "LightGBMClassifier", "ElasticNet", "ElasticNetCV",
"LightGBMRegressor", "LinearRegression".
:paramtype stack_meta_learner_type: str or ~flow.models.StackMetaLearnerType
:keyword stack_meta_learner_train_percentage:
:paramtype stack_meta_learner_train_percentage: float
:keyword stack_meta_learner_k_wargs: Anything.
:paramtype stack_meta_learner_k_wargs: any
"""
super(StackEnsembleSettings, self).__init__(**kwargs)
self.stack_meta_learner_type = kwargs.get('stack_meta_learner_type', None)
self.stack_meta_learner_train_percentage = kwargs.get('stack_meta_learner_train_percentage', None)
self.stack_meta_learner_k_wargs = kwargs.get('stack_meta_learner_k_wargs', None)
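

# StackEnsembleSettings accepts its enum-typed field either as a plain string or as a
# ~flow.models.StackMetaLearnerType member; msrest stores and serializes the string
# value either way, and the 'object'-typed kwargs field passes through untyped. The
# values below are illustrative placeholders:
def _example_stack_ensemble_settings():
    settings = StackEnsembleSettings(
        stack_meta_learner_type='LogisticRegression',
        stack_meta_learner_train_percentage=0.2,
        stack_meta_learner_k_wargs={'C': 1.0},  # free-form payload, not validated here
    )
    return settings.serialize()  # {'stackMetaLearnerType': 'LogisticRegression', ...}
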
class StandbyPoolProperties(msrest.serialization.Model):
"""StandbyPoolProperties.
:ivar name:
:vartype name: str
:ivar count:
:vartype count: int
:ivar vm_size:
:vartype vm_size: str
:ivar standby_available_instances:
:vartype standby_available_instances: list[~flow.models.StandbyPoolResourceStatus]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'count': {'key': 'count', 'type': 'int'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'standby_available_instances': {'key': 'standbyAvailableInstances', 'type': '[StandbyPoolResourceStatus]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword count:
:paramtype count: int
:keyword vm_size:
:paramtype vm_size: str
:keyword standby_available_instances:
:paramtype standby_available_instances: list[~flow.models.StandbyPoolResourceStatus]
"""
super(StandbyPoolProperties, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.count = kwargs.get('count', None)
self.vm_size = kwargs.get('vm_size', None)
self.standby_available_instances = kwargs.get('standby_available_instances', None)
class StandbyPoolResourceStatus(msrest.serialization.Model):
"""StandbyPoolResourceStatus.
:ivar status:
:vartype status: str
:ivar error:
:vartype error: ~flow.models.CloudError
"""
_attribute_map = {
'status': {'key': 'status', 'type': 'str'},
'error': {'key': 'error', 'type': 'CloudError'},
}
def __init__(
self,
**kwargs
):
"""
:keyword status:
:paramtype status: str
:keyword error:
:paramtype error: ~flow.models.CloudError
"""
super(StandbyPoolResourceStatus, self).__init__(**kwargs)
self.status = kwargs.get('status', None)
self.error = kwargs.get('error', None)
class StartRunResult(msrest.serialization.Model):
"""StartRunResult.
All required parameters must be populated in order to send to Azure.
:ivar run_id: Required.
:vartype run_id: str
"""
_validation = {
'run_id': {'required': True, 'min_length': 1},
}
_attribute_map = {
'run_id': {'key': 'runId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword run_id: Required.
:paramtype run_id: str
"""
super(StartRunResult, self).__init__(**kwargs)
self.run_id = kwargs['run_id']
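

# StartRunResult is one of the few models here with a required field: its constructor
# reads kwargs['run_id'] directly, so omitting it raises KeyError instead of storing
# None, and the _validation entry (min_length=1) is what msrest's client-side
# validation uses to flag empty strings. A short sketch with a hypothetical run id:
def _example_start_run_result():
    result = StartRunResult(run_id='a1b2c3')
    assert result.serialize() == {'runId': 'a1b2c3'}
    try:
        StartRunResult()  # no run_id supplied
    except KeyError:
        pass  # required parameter missing
    return result
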
class StepRunProfile(msrest.serialization.Model):
"""StepRunProfile.
:ivar step_run_id:
:vartype step_run_id: str
:ivar step_run_number:
:vartype step_run_number: int
:ivar run_url:
:vartype run_url: str
:ivar compute_target:
:vartype compute_target: str
:ivar compute_target_url:
:vartype compute_target_url: str
:ivar node_id:
:vartype node_id: str
:ivar node_name:
:vartype node_name: str
:ivar step_name:
:vartype step_name: str
:ivar create_time:
:vartype create_time: long
:ivar start_time:
:vartype start_time: long
:ivar end_time:
:vartype end_time: long
:ivar status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:vartype status: str or ~flow.models.RunStatus
:ivar status_detail:
:vartype status_detail: str
:ivar is_reused:
:vartype is_reused: bool
:ivar reused_pipeline_run_id:
:vartype reused_pipeline_run_id: str
:ivar reused_step_run_id:
:vartype reused_step_run_id: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar status_timeline:
:vartype status_timeline: list[~flow.models.RunStatusPeriod]
"""
_attribute_map = {
'step_run_id': {'key': 'stepRunId', 'type': 'str'},
'step_run_number': {'key': 'stepRunNumber', 'type': 'int'},
'run_url': {'key': 'runUrl', 'type': 'str'},
'compute_target': {'key': 'computeTarget', 'type': 'str'},
'compute_target_url': {'key': 'computeTargetUrl', 'type': 'str'},
'node_id': {'key': 'nodeId', 'type': 'str'},
'node_name': {'key': 'nodeName', 'type': 'str'},
'step_name': {'key': 'stepName', 'type': 'str'},
'create_time': {'key': 'createTime', 'type': 'long'},
'start_time': {'key': 'startTime', 'type': 'long'},
'end_time': {'key': 'endTime', 'type': 'long'},
'status': {'key': 'status', 'type': 'str'},
'status_detail': {'key': 'statusDetail', 'type': 'str'},
'is_reused': {'key': 'isReused', 'type': 'bool'},
'reused_pipeline_run_id': {'key': 'reusedPipelineRunId', 'type': 'str'},
'reused_step_run_id': {'key': 'reusedStepRunId', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'status_timeline': {'key': 'statusTimeline', 'type': '[RunStatusPeriod]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword step_run_id:
:paramtype step_run_id: str
:keyword step_run_number:
:paramtype step_run_number: int
:keyword run_url:
:paramtype run_url: str
:keyword compute_target:
:paramtype compute_target: str
:keyword compute_target_url:
:paramtype compute_target_url: str
:keyword node_id:
:paramtype node_id: str
:keyword node_name:
:paramtype node_name: str
:keyword step_name:
:paramtype step_name: str
:keyword create_time:
:paramtype create_time: long
:keyword start_time:
:paramtype start_time: long
:keyword end_time:
:paramtype end_time: long
:keyword status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:paramtype status: str or ~flow.models.RunStatus
:keyword status_detail:
:paramtype status_detail: str
:keyword is_reused:
:paramtype is_reused: bool
:keyword reused_pipeline_run_id:
:paramtype reused_pipeline_run_id: str
:keyword reused_step_run_id:
:paramtype reused_step_run_id: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword status_timeline:
:paramtype status_timeline: list[~flow.models.RunStatusPeriod]
"""
super(StepRunProfile, self).__init__(**kwargs)
self.step_run_id = kwargs.get('step_run_id', None)
self.step_run_number = kwargs.get('step_run_number', None)
self.run_url = kwargs.get('run_url', None)
self.compute_target = kwargs.get('compute_target', None)
self.compute_target_url = kwargs.get('compute_target_url', None)
self.node_id = kwargs.get('node_id', None)
self.node_name = kwargs.get('node_name', None)
self.step_name = kwargs.get('step_name', None)
self.create_time = kwargs.get('create_time', None)
self.start_time = kwargs.get('start_time', None)
self.end_time = kwargs.get('end_time', None)
self.status = kwargs.get('status', None)
self.status_detail = kwargs.get('status_detail', None)
self.is_reused = kwargs.get('is_reused', None)
self.reused_pipeline_run_id = kwargs.get('reused_pipeline_run_id', None)
self.reused_step_run_id = kwargs.get('reused_step_run_id', None)
self.tags = kwargs.get('tags', None)
self.status_timeline = kwargs.get('status_timeline', None)
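

# Deserialization goes the other way: msrest's Model.deserialize maps wire keys from
# _attribute_map back onto the Python attributes. The 'long' timestamps are opaque to
# the generated model; treating them as epoch milliseconds is an assumption of this
# sketch, not something the contract states.
def _example_step_run_profile():
    payload = {  # hypothetical server response fragment
        'stepRunId': 'step-123',
        'status': 'Completed',
        'createTime': 1700000000000,
        'isReused': False,
    }
    profile = StepRunProfile.deserialize(payload)
    assert profile.step_run_id == 'step-123'
    assert profile.status == 'Completed'
    return profile
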
class StorageInfo(msrest.serialization.Model):
"""StorageInfo.
:ivar storage_auth_type: Possible values include: "MSI", "ConnectionString", "SAS".
:vartype storage_auth_type: str or ~flow.models.StorageAuthType
:ivar connection_string:
:vartype connection_string: str
:ivar sas_token:
:vartype sas_token: str
:ivar account_name:
:vartype account_name: str
"""
_attribute_map = {
'storage_auth_type': {'key': 'storageAuthType', 'type': 'str'},
'connection_string': {'key': 'connectionString', 'type': 'str'},
'sas_token': {'key': 'sasToken', 'type': 'str'},
'account_name': {'key': 'accountName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword storage_auth_type: Possible values include: "MSI", "ConnectionString", "SAS".
:paramtype storage_auth_type: str or ~flow.models.StorageAuthType
:keyword connection_string:
:paramtype connection_string: str
:keyword sas_token:
:paramtype sas_token: str
:keyword account_name:
:paramtype account_name: str
"""
super(StorageInfo, self).__init__(**kwargs)
self.storage_auth_type = kwargs.get('storage_auth_type', None)
self.connection_string = kwargs.get('connection_string', None)
self.sas_token = kwargs.get('sas_token', None)
self.account_name = kwargs.get('account_name', None)
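

# StorageInfo carries one credential field per auth mode; presumably only the field
# matching storage_auth_type is meant to be populated (the contract itself does not
# enforce this). Sketch with placeholder values; serialize() omits unset (None)
# fields, so each variant stays minimal:
def _example_storage_info():
    sas = StorageInfo(storage_auth_type='SAS',
                      sas_token='?sv=2021-08-06&sig=REDACTED',
                      account_name='examplestorage')
    msi = StorageInfo(storage_auth_type='MSI', account_name='examplestorage')
    assert 'connectionString' not in sas.serialize()
    return sas, msi
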
class StoredProcedureParameter(msrest.serialization.Model):
"""StoredProcedureParameter.
:ivar name:
:vartype name: str
:ivar value:
:vartype value: str
:ivar type: Possible values include: "String", "Int", "Decimal", "Guid", "Boolean", "Date".
:vartype type: str or ~flow.models.StoredProcedureParameterType
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword value:
:paramtype value: str
:keyword type: Possible values include: "String", "Int", "Decimal", "Guid", "Boolean", "Date".
:paramtype type: str or ~flow.models.StoredProcedureParameterType
"""
super(StoredProcedureParameter, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.value = kwargs.get('value', None)
self.type = kwargs.get('type', None)
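

# SqlDataPath (defined earlier in this module) and StoredProcedureParameter compose:
# a data path can name a stored procedure plus its typed parameters. Setting only one
# of sql_table_name / sql_query / sql_stored_procedure_name is a usage assumption of
# this sketch, not something the model enforces; the procedure name is invented.
def _example_sql_data_path():
    path = SqlDataPath(
        sql_stored_procedure_name='dbo.GetTrainingRows',
        sql_stored_procedure_params=[
            StoredProcedureParameter(name='MaxRows', value='1000', type='Int'),
        ],
    )
    body = path.serialize()
    assert body['sqlStoredProcedureParams'][0]['type'] == 'Int'
    return body
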
class Stream(msrest.serialization.Model):
"""Stream.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar can_read:
:vartype can_read: bool
:ivar can_write:
:vartype can_write: bool
:ivar can_seek:
:vartype can_seek: bool
:ivar can_timeout:
:vartype can_timeout: bool
:ivar length:
:vartype length: long
:ivar position:
:vartype position: long
:ivar read_timeout:
:vartype read_timeout: int
:ivar write_timeout:
:vartype write_timeout: int
"""
_validation = {
'can_read': {'readonly': True},
'can_write': {'readonly': True},
'can_seek': {'readonly': True},
'can_timeout': {'readonly': True},
'length': {'readonly': True},
}
_attribute_map = {
'can_read': {'key': 'canRead', 'type': 'bool'},
'can_write': {'key': 'canWrite', 'type': 'bool'},
'can_seek': {'key': 'canSeek', 'type': 'bool'},
'can_timeout': {'key': 'canTimeout', 'type': 'bool'},
'length': {'key': 'length', 'type': 'long'},
'position': {'key': 'position', 'type': 'long'},
'read_timeout': {'key': 'readTimeout', 'type': 'int'},
'write_timeout': {'key': 'writeTimeout', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword position:
:paramtype position: long
:keyword read_timeout:
:paramtype read_timeout: int
:keyword write_timeout:
:paramtype write_timeout: int
"""
super(Stream, self).__init__(**kwargs)
self.can_read = None
self.can_write = None
self.can_seek = None
self.can_timeout = None
self.length = None
self.position = kwargs.get('position', None)
self.read_timeout = kwargs.get('read_timeout', None)
self.write_timeout = kwargs.get('write_timeout', None)
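

# Stream marks most of its members readonly in _validation: the constructor pins them
# to None (msrest ignores readonly kwargs), and Model.serialize(), which drops
# readonly values by default, omits them when sending. Illustrative check:
def _example_stream():
    s = Stream(position=0, read_timeout=5000, can_read=True)  # can_read is ignored
    assert s.can_read is None
    body = s.serialize()
    assert 'canRead' not in body and body['position'] == 0
    return body
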
class StructuredInterface(msrest.serialization.Model):
"""StructuredInterface.
:ivar command_line_pattern:
:vartype command_line_pattern: str
:ivar inputs:
:vartype inputs: list[~flow.models.StructuredInterfaceInput]
:ivar outputs:
:vartype outputs: list[~flow.models.StructuredInterfaceOutput]
:ivar control_outputs:
:vartype control_outputs: list[~flow.models.ControlOutput]
:ivar parameters:
:vartype parameters: list[~flow.models.StructuredInterfaceParameter]
:ivar metadata_parameters:
:vartype metadata_parameters: list[~flow.models.StructuredInterfaceParameter]
:ivar arguments:
:vartype arguments: list[~flow.models.ArgumentAssignment]
"""
_attribute_map = {
'command_line_pattern': {'key': 'commandLinePattern', 'type': 'str'},
'inputs': {'key': 'inputs', 'type': '[StructuredInterfaceInput]'},
'outputs': {'key': 'outputs', 'type': '[StructuredInterfaceOutput]'},
'control_outputs': {'key': 'controlOutputs', 'type': '[ControlOutput]'},
'parameters': {'key': 'parameters', 'type': '[StructuredInterfaceParameter]'},
'metadata_parameters': {'key': 'metadataParameters', 'type': '[StructuredInterfaceParameter]'},
'arguments': {'key': 'arguments', 'type': '[ArgumentAssignment]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword command_line_pattern:
:paramtype command_line_pattern: str
:keyword inputs:
:paramtype inputs: list[~flow.models.StructuredInterfaceInput]
:keyword outputs:
:paramtype outputs: list[~flow.models.StructuredInterfaceOutput]
:keyword control_outputs:
:paramtype control_outputs: list[~flow.models.ControlOutput]
:keyword parameters:
:paramtype parameters: list[~flow.models.StructuredInterfaceParameter]
:keyword metadata_parameters:
:paramtype metadata_parameters: list[~flow.models.StructuredInterfaceParameter]
:keyword arguments:
:paramtype arguments: list[~flow.models.ArgumentAssignment]
"""
super(StructuredInterface, self).__init__(**kwargs)
self.command_line_pattern = kwargs.get('command_line_pattern', None)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
self.control_outputs = kwargs.get('control_outputs', None)
self.parameters = kwargs.get('parameters', None)
self.metadata_parameters = kwargs.get('metadata_parameters', None)
self.arguments = kwargs.get('arguments', None)
class StructuredInterfaceInput(msrest.serialization.Model):
"""StructuredInterfaceInput.
:ivar name:
:vartype name: str
:ivar label:
:vartype label: str
:ivar data_type_ids_list:
:vartype data_type_ids_list: list[str]
:ivar is_optional:
:vartype is_optional: bool
:ivar description:
:vartype description: str
:ivar skip_processing:
:vartype skip_processing: bool
:ivar is_resource:
:vartype is_resource: bool
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar overwrite:
:vartype overwrite: bool
:ivar data_reference_name:
:vartype data_reference_name: str
:ivar dataset_types:
:vartype dataset_types: list[str or ~flow.models.DatasetType]
"""
_validation = {
'dataset_types': {'unique': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'data_type_ids_list': {'key': 'dataTypeIdsList', 'type': '[str]'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'skip_processing': {'key': 'skipProcessing', 'type': 'bool'},
'is_resource': {'key': 'isResource', 'type': 'bool'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'overwrite': {'key': 'overwrite', 'type': 'bool'},
'data_reference_name': {'key': 'dataReferenceName', 'type': 'str'},
'dataset_types': {'key': 'datasetTypes', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword label:
:paramtype label: str
:keyword data_type_ids_list:
:paramtype data_type_ids_list: list[str]
:keyword is_optional:
:paramtype is_optional: bool
:keyword description:
:paramtype description: str
:keyword skip_processing:
:paramtype skip_processing: bool
:keyword is_resource:
:paramtype is_resource: bool
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword overwrite:
:paramtype overwrite: bool
:keyword data_reference_name:
:paramtype data_reference_name: str
:keyword dataset_types:
:paramtype dataset_types: list[str or ~flow.models.DatasetType]
"""
super(StructuredInterfaceInput, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.label = kwargs.get('label', None)
self.data_type_ids_list = kwargs.get('data_type_ids_list', None)
self.is_optional = kwargs.get('is_optional', None)
self.description = kwargs.get('description', None)
self.skip_processing = kwargs.get('skip_processing', None)
self.is_resource = kwargs.get('is_resource', None)
self.data_store_mode = kwargs.get('data_store_mode', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
self.overwrite = kwargs.get('overwrite', None)
self.data_reference_name = kwargs.get('data_reference_name', None)
self.dataset_types = kwargs.get('dataset_types', None)
class StructuredInterfaceOutput(msrest.serialization.Model):
"""StructuredInterfaceOutput.
:ivar name:
:vartype name: str
:ivar label:
:vartype label: str
:ivar data_type_id:
:vartype data_type_id: str
:ivar pass_through_data_type_input_name:
:vartype pass_through_data_type_input_name: str
:ivar description:
:vartype description: str
:ivar skip_processing:
:vartype skip_processing: bool
:ivar is_artifact:
:vartype is_artifact: bool
:ivar data_store_name:
:vartype data_store_name: str
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar overwrite:
:vartype overwrite: bool
:ivar data_reference_name:
:vartype data_reference_name: str
:ivar training_output:
:vartype training_output: ~flow.models.TrainingOutput
:ivar dataset_output:
:vartype dataset_output: ~flow.models.DatasetOutput
:ivar asset_output_settings:
:vartype asset_output_settings: ~flow.models.AssetOutputSettings
:ivar early_available:
:vartype early_available: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'data_type_id': {'key': 'dataTypeId', 'type': 'str'},
'pass_through_data_type_input_name': {'key': 'passThroughDataTypeInputName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'skip_processing': {'key': 'skipProcessing', 'type': 'bool'},
'is_artifact': {'key': 'IsArtifact', 'type': 'bool'},
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'overwrite': {'key': 'overwrite', 'type': 'bool'},
'data_reference_name': {'key': 'dataReferenceName', 'type': 'str'},
'training_output': {'key': 'trainingOutput', 'type': 'TrainingOutput'},
'dataset_output': {'key': 'datasetOutput', 'type': 'DatasetOutput'},
'asset_output_settings': {'key': 'AssetOutputSettings', 'type': 'AssetOutputSettings'},
'early_available': {'key': 'EarlyAvailable', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword label:
:paramtype label: str
:keyword data_type_id:
:paramtype data_type_id: str
:keyword pass_through_data_type_input_name:
:paramtype pass_through_data_type_input_name: str
:keyword description:
:paramtype description: str
:keyword skip_processing:
:paramtype skip_processing: bool
:keyword is_artifact:
:paramtype is_artifact: bool
:keyword data_store_name:
:paramtype data_store_name: str
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword overwrite:
:paramtype overwrite: bool
:keyword data_reference_name:
:paramtype data_reference_name: str
:keyword training_output:
:paramtype training_output: ~flow.models.TrainingOutput
:keyword dataset_output:
:paramtype dataset_output: ~flow.models.DatasetOutput
:keyword asset_output_settings:
:paramtype asset_output_settings: ~flow.models.AssetOutputSettings
:keyword early_available:
:paramtype early_available: bool
"""
super(StructuredInterfaceOutput, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.label = kwargs.get('label', None)
self.data_type_id = kwargs.get('data_type_id', None)
self.pass_through_data_type_input_name = kwargs.get('pass_through_data_type_input_name', None)
self.description = kwargs.get('description', None)
self.skip_processing = kwargs.get('skip_processing', None)
self.is_artifact = kwargs.get('is_artifact', None)
self.data_store_name = kwargs.get('data_store_name', None)
self.data_store_mode = kwargs.get('data_store_mode', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
self.overwrite = kwargs.get('overwrite', None)
self.data_reference_name = kwargs.get('data_reference_name', None)
self.training_output = kwargs.get('training_output', None)
self.dataset_output = kwargs.get('dataset_output', None)
self.asset_output_settings = kwargs.get('asset_output_settings', None)
self.early_available = kwargs.get('early_available', None)
class StructuredInterfaceParameter(msrest.serialization.Model):
"""StructuredInterfaceParameter.
:ivar name:
:vartype name: str
:ivar label:
:vartype label: str
:ivar parameter_type: Possible values include: "Int", "Double", "Bool", "String", "Undefined".
:vartype parameter_type: str or ~flow.models.ParameterType
:ivar is_optional:
:vartype is_optional: bool
:ivar default_value:
:vartype default_value: str
:ivar lower_bound:
:vartype lower_bound: str
:ivar upper_bound:
:vartype upper_bound: str
:ivar enum_values:
:vartype enum_values: list[str]
:ivar enum_values_to_argument_strings: This is a dictionary.
:vartype enum_values_to_argument_strings: dict[str, str]
:ivar description:
:vartype description: str
:ivar set_environment_variable:
:vartype set_environment_variable: bool
:ivar environment_variable_override:
:vartype environment_variable_override: str
:ivar enabled_by_parameter_name:
:vartype enabled_by_parameter_name: str
:ivar enabled_by_parameter_values:
:vartype enabled_by_parameter_values: list[str]
:ivar ui_hint:
:vartype ui_hint: ~flow.models.UIParameterHint
:ivar group_names:
:vartype group_names: list[str]
:ivar argument_name:
:vartype argument_name: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'parameter_type': {'key': 'parameterType', 'type': 'str'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
'default_value': {'key': 'defaultValue', 'type': 'str'},
'lower_bound': {'key': 'lowerBound', 'type': 'str'},
'upper_bound': {'key': 'upperBound', 'type': 'str'},
'enum_values': {'key': 'enumValues', 'type': '[str]'},
'enum_values_to_argument_strings': {'key': 'enumValuesToArgumentStrings', 'type': '{str}'},
'description': {'key': 'description', 'type': 'str'},
'set_environment_variable': {'key': 'setEnvironmentVariable', 'type': 'bool'},
'environment_variable_override': {'key': 'environmentVariableOverride', 'type': 'str'},
'enabled_by_parameter_name': {'key': 'enabledByParameterName', 'type': 'str'},
'enabled_by_parameter_values': {'key': 'enabledByParameterValues', 'type': '[str]'},
'ui_hint': {'key': 'uiHint', 'type': 'UIParameterHint'},
'group_names': {'key': 'groupNames', 'type': '[str]'},
'argument_name': {'key': 'argumentName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword label:
:paramtype label: str
:keyword parameter_type: Possible values include: "Int", "Double", "Bool", "String",
"Undefined".
:paramtype parameter_type: str or ~flow.models.ParameterType
:keyword is_optional:
:paramtype is_optional: bool
:keyword default_value:
:paramtype default_value: str
:keyword lower_bound:
:paramtype lower_bound: str
:keyword upper_bound:
:paramtype upper_bound: str
:keyword enum_values:
:paramtype enum_values: list[str]
:keyword enum_values_to_argument_strings: This is a dictionary.
:paramtype enum_values_to_argument_strings: dict[str, str]
:keyword description:
:paramtype description: str
:keyword set_environment_variable:
:paramtype set_environment_variable: bool
:keyword environment_variable_override:
:paramtype environment_variable_override: str
:keyword enabled_by_parameter_name:
:paramtype enabled_by_parameter_name: str
:keyword enabled_by_parameter_values:
:paramtype enabled_by_parameter_values: list[str]
:keyword ui_hint:
:paramtype ui_hint: ~flow.models.UIParameterHint
:keyword group_names:
:paramtype group_names: list[str]
:keyword argument_name:
:paramtype argument_name: str
"""
super(StructuredInterfaceParameter, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.label = kwargs.get('label', None)
self.parameter_type = kwargs.get('parameter_type', None)
self.is_optional = kwargs.get('is_optional', None)
self.default_value = kwargs.get('default_value', None)
self.lower_bound = kwargs.get('lower_bound', None)
self.upper_bound = kwargs.get('upper_bound', None)
self.enum_values = kwargs.get('enum_values', None)
self.enum_values_to_argument_strings = kwargs.get('enum_values_to_argument_strings', None)
self.description = kwargs.get('description', None)
self.set_environment_variable = kwargs.get('set_environment_variable', None)
self.environment_variable_override = kwargs.get('environment_variable_override', None)
self.enabled_by_parameter_name = kwargs.get('enabled_by_parameter_name', None)
self.enabled_by_parameter_values = kwargs.get('enabled_by_parameter_values', None)
self.ui_hint = kwargs.get('ui_hint', None)
self.group_names = kwargs.get('group_names', None)
self.argument_name = kwargs.get('argument_name', None)
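

# The preceding models compose into a StructuredInterface: inputs, outputs and
# parameters are parallel lists describing a component's surface. A minimal sketch;
# the names, data type ids and command-line pattern are placeholders, not values the
# service prescribes:
def _example_structured_interface():
    interface = StructuredInterface(
        command_line_pattern='train --data {input_data} --lr {learning_rate}',
        inputs=[StructuredInterfaceInput(name='input_data',
                                         data_type_ids_list=['AnyDirectory'],
                                         data_store_mode='Mount')],
        outputs=[StructuredInterfaceOutput(name='model_output',
                                           data_type_id='AnyDirectory',
                                           data_store_mode='Upload')],
        parameters=[StructuredInterfaceParameter(name='learning_rate',
                                                 parameter_type='Double',
                                                 default_value='0.01')],
    )
    return interface.serialize()
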
class StudioMigrationInfo(msrest.serialization.Model):
"""StudioMigrationInfo.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar source_workspace_id:
:vartype source_workspace_id: str
:ivar source_experiment_id:
:vartype source_experiment_id: str
:ivar source_experiment_link:
:vartype source_experiment_link: str
:ivar failed_node_id_list:
:vartype failed_node_id_list: list[str]
:ivar error_message:
:vartype error_message: str
"""
_validation = {
'error_message': {'readonly': True},
}
_attribute_map = {
'source_workspace_id': {'key': 'sourceWorkspaceId', 'type': 'str'},
'source_experiment_id': {'key': 'sourceExperimentId', 'type': 'str'},
'source_experiment_link': {'key': 'sourceExperimentLink', 'type': 'str'},
'failed_node_id_list': {'key': 'failedNodeIdList', 'type': '[str]'},
'error_message': {'key': 'errorMessage', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword source_workspace_id:
:paramtype source_workspace_id: str
:keyword source_experiment_id:
:paramtype source_experiment_id: str
:keyword source_experiment_link:
:paramtype source_experiment_link: str
:keyword failed_node_id_list:
:paramtype failed_node_id_list: list[str]
"""
super(StudioMigrationInfo, self).__init__(**kwargs)
self.source_workspace_id = kwargs.get('source_workspace_id', None)
self.source_experiment_id = kwargs.get('source_experiment_id', None)
self.source_experiment_link = kwargs.get('source_experiment_link', None)
self.failed_node_id_list = kwargs.get('failed_node_id_list', None)
self.error_message = None
class SubGraphConcatenateAssignment(msrest.serialization.Model):
"""SubGraphConcatenateAssignment.
:ivar concatenate_parameter:
:vartype concatenate_parameter: list[~flow.models.ParameterAssignment]
:ivar parameter_assignments:
:vartype parameter_assignments: ~flow.models.SubPipelineParameterAssignment
"""
_attribute_map = {
'concatenate_parameter': {'key': 'concatenateParameter', 'type': '[ParameterAssignment]'},
'parameter_assignments': {'key': 'parameterAssignments', 'type': 'SubPipelineParameterAssignment'},
}
def __init__(
self,
**kwargs
):
"""
:keyword concatenate_parameter:
:paramtype concatenate_parameter: list[~flow.models.ParameterAssignment]
:keyword parameter_assignments:
:paramtype parameter_assignments: ~flow.models.SubPipelineParameterAssignment
"""
super(SubGraphConcatenateAssignment, self).__init__(**kwargs)
self.concatenate_parameter = kwargs.get('concatenate_parameter', None)
self.parameter_assignments = kwargs.get('parameter_assignments', None)
class SubGraphConfiguration(msrest.serialization.Model):
"""SubGraphConfiguration.
:ivar graph_id:
:vartype graph_id: str
:ivar graph_draft_id:
:vartype graph_draft_id: str
:ivar default_cloud_priority:
:vartype default_cloud_priority: ~flow.models.CloudPrioritySetting
:ivar is_dynamic:
:vartype is_dynamic: bool
"""
_attribute_map = {
'graph_id': {'key': 'graphId', 'type': 'str'},
'graph_draft_id': {'key': 'graphDraftId', 'type': 'str'},
'default_cloud_priority': {'key': 'DefaultCloudPriority', 'type': 'CloudPrioritySetting'},
'is_dynamic': {'key': 'IsDynamic', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword graph_id:
:paramtype graph_id: str
:keyword graph_draft_id:
:paramtype graph_draft_id: str
:keyword default_cloud_priority:
:paramtype default_cloud_priority: ~flow.models.CloudPrioritySetting
:keyword is_dynamic:
:paramtype is_dynamic: bool
"""
super(SubGraphConfiguration, self).__init__(**kwargs)
self.graph_id = kwargs.get('graph_id', None)
self.graph_draft_id = kwargs.get('graph_draft_id', None)
self.default_cloud_priority = kwargs.get('default_cloud_priority', None)
self.is_dynamic = kwargs.get('is_dynamic', False)
class SubGraphConnectionInfo(msrest.serialization.Model):
"""SubGraphConnectionInfo.
:ivar node_id:
:vartype node_id: str
:ivar port_name:
:vartype port_name: str
"""
_attribute_map = {
'node_id': {'key': 'nodeId', 'type': 'str'},
'port_name': {'key': 'portName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword node_id:
:paramtype node_id: str
:keyword port_name:
:paramtype port_name: str
"""
super(SubGraphConnectionInfo, self).__init__(**kwargs)
self.node_id = kwargs.get('node_id', None)
self.port_name = kwargs.get('port_name', None)
class SubGraphDataPathParameterAssignment(msrest.serialization.Model):
"""SubGraphDataPathParameterAssignment.
:ivar data_set_path_parameter:
:vartype data_set_path_parameter: ~flow.models.DataSetPathParameter
:ivar data_set_path_parameter_assignments:
:vartype data_set_path_parameter_assignments: list[str]
"""
_attribute_map = {
'data_set_path_parameter': {'key': 'dataSetPathParameter', 'type': 'DataSetPathParameter'},
'data_set_path_parameter_assignments': {'key': 'dataSetPathParameterAssignments', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_set_path_parameter:
:paramtype data_set_path_parameter: ~flow.models.DataSetPathParameter
:keyword data_set_path_parameter_assignments:
:paramtype data_set_path_parameter_assignments: list[str]
"""
super(SubGraphDataPathParameterAssignment, self).__init__(**kwargs)
self.data_set_path_parameter = kwargs.get('data_set_path_parameter', None)
self.data_set_path_parameter_assignments = kwargs.get('data_set_path_parameter_assignments', None)
class SubGraphInfo(msrest.serialization.Model):
"""SubGraphInfo.
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar default_compute_target:
:vartype default_compute_target: ~flow.models.ComputeSetting
:ivar default_data_store:
:vartype default_data_store: ~flow.models.DatastoreSetting
:ivar id:
:vartype id: str
:ivar parent_graph_id:
:vartype parent_graph_id: str
:ivar pipeline_definition_id:
:vartype pipeline_definition_id: str
:ivar sub_graph_parameter_assignment:
:vartype sub_graph_parameter_assignment: list[~flow.models.SubGraphParameterAssignment]
:ivar sub_graph_concatenate_assignment:
:vartype sub_graph_concatenate_assignment: list[~flow.models.SubGraphConcatenateAssignment]
:ivar sub_graph_data_path_parameter_assignment:
:vartype sub_graph_data_path_parameter_assignment:
list[~flow.models.SubGraphDataPathParameterAssignment]
:ivar sub_graph_default_compute_target_nodes:
:vartype sub_graph_default_compute_target_nodes: list[str]
:ivar sub_graph_default_data_store_nodes:
:vartype sub_graph_default_data_store_nodes: list[str]
:ivar inputs:
:vartype inputs: list[~flow.models.SubGraphPortInfo]
:ivar outputs:
:vartype outputs: list[~flow.models.SubGraphPortInfo]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'default_compute_target': {'key': 'defaultComputeTarget', 'type': 'ComputeSetting'},
'default_data_store': {'key': 'defaultDataStore', 'type': 'DatastoreSetting'},
'id': {'key': 'id', 'type': 'str'},
'parent_graph_id': {'key': 'parentGraphId', 'type': 'str'},
'pipeline_definition_id': {'key': 'pipelineDefinitionId', 'type': 'str'},
'sub_graph_parameter_assignment': {'key': 'subGraphParameterAssignment', 'type': '[SubGraphParameterAssignment]'},
'sub_graph_concatenate_assignment': {'key': 'subGraphConcatenateAssignment', 'type': '[SubGraphConcatenateAssignment]'},
'sub_graph_data_path_parameter_assignment': {'key': 'subGraphDataPathParameterAssignment', 'type': '[SubGraphDataPathParameterAssignment]'},
'sub_graph_default_compute_target_nodes': {'key': 'subGraphDefaultComputeTargetNodes', 'type': '[str]'},
'sub_graph_default_data_store_nodes': {'key': 'subGraphDefaultDataStoreNodes', 'type': '[str]'},
'inputs': {'key': 'inputs', 'type': '[SubGraphPortInfo]'},
'outputs': {'key': 'outputs', 'type': '[SubGraphPortInfo]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword default_compute_target:
:paramtype default_compute_target: ~flow.models.ComputeSetting
:keyword default_data_store:
:paramtype default_data_store: ~flow.models.DatastoreSetting
:keyword id:
:paramtype id: str
:keyword parent_graph_id:
:paramtype parent_graph_id: str
:keyword pipeline_definition_id:
:paramtype pipeline_definition_id: str
:keyword sub_graph_parameter_assignment:
:paramtype sub_graph_parameter_assignment: list[~flow.models.SubGraphParameterAssignment]
:keyword sub_graph_concatenate_assignment:
:paramtype sub_graph_concatenate_assignment: list[~flow.models.SubGraphConcatenateAssignment]
:keyword sub_graph_data_path_parameter_assignment:
:paramtype sub_graph_data_path_parameter_assignment:
list[~flow.models.SubGraphDataPathParameterAssignment]
:keyword sub_graph_default_compute_target_nodes:
:paramtype sub_graph_default_compute_target_nodes: list[str]
:keyword sub_graph_default_data_store_nodes:
:paramtype sub_graph_default_data_store_nodes: list[str]
:keyword inputs:
:paramtype inputs: list[~flow.models.SubGraphPortInfo]
:keyword outputs:
:paramtype outputs: list[~flow.models.SubGraphPortInfo]
"""
super(SubGraphInfo, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.default_compute_target = kwargs.get('default_compute_target', None)
self.default_data_store = kwargs.get('default_data_store', None)
self.id = kwargs.get('id', None)
self.parent_graph_id = kwargs.get('parent_graph_id', None)
self.pipeline_definition_id = kwargs.get('pipeline_definition_id', None)
self.sub_graph_parameter_assignment = kwargs.get('sub_graph_parameter_assignment', None)
self.sub_graph_concatenate_assignment = kwargs.get('sub_graph_concatenate_assignment', None)
self.sub_graph_data_path_parameter_assignment = kwargs.get('sub_graph_data_path_parameter_assignment', None)
self.sub_graph_default_compute_target_nodes = kwargs.get('sub_graph_default_compute_target_nodes', None)
self.sub_graph_default_data_store_nodes = kwargs.get('sub_graph_default_data_store_nodes', None)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
class SubGraphParameterAssignment(msrest.serialization.Model):
"""SubGraphParameterAssignment.
:ivar parameter:
:vartype parameter: ~flow.models.Parameter
:ivar parameter_assignments:
:vartype parameter_assignments: list[~flow.models.SubPipelineParameterAssignment]
"""
_attribute_map = {
'parameter': {'key': 'parameter', 'type': 'Parameter'},
'parameter_assignments': {'key': 'parameterAssignments', 'type': '[SubPipelineParameterAssignment]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword parameter:
:paramtype parameter: ~flow.models.Parameter
:keyword parameter_assignments:
:paramtype parameter_assignments: list[~flow.models.SubPipelineParameterAssignment]
"""
super(SubGraphParameterAssignment, self).__init__(**kwargs)
self.parameter = kwargs.get('parameter', None)
self.parameter_assignments = kwargs.get('parameter_assignments', None)
class SubGraphPortInfo(msrest.serialization.Model):
"""SubGraphPortInfo.
:ivar name:
:vartype name: str
:ivar internal:
:vartype internal: list[~flow.models.SubGraphConnectionInfo]
:ivar external:
:vartype external: list[~flow.models.SubGraphConnectionInfo]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'internal': {'key': 'internal', 'type': '[SubGraphConnectionInfo]'},
'external': {'key': 'external', 'type': '[SubGraphConnectionInfo]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword internal:
:paramtype internal: list[~flow.models.SubGraphConnectionInfo]
:keyword external:
:paramtype external: list[~flow.models.SubGraphConnectionInfo]
"""
super(SubGraphPortInfo, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.internal = kwargs.get('internal', None)
self.external = kwargs.get('external', None)
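

# SubGraphPortInfo pairs an exposed port name with its internal and external
# SubGraphConnectionInfo endpoints, and SubGraphInfo (defined above) aggregates those
# ports for a whole subgraph. Node and port identifiers below are invented for
# illustration:
def _example_sub_graph_port():
    port = SubGraphPortInfo(
        name='training_data',
        internal=[SubGraphConnectionInfo(node_id='node-42', port_name='dataset')],
        external=[SubGraphConnectionInfo(node_id='parent-7', port_name='output')],
    )
    graph = SubGraphInfo(name='train-subgraph', id='graph-001', inputs=[port])
    return graph.serialize()
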
class SubmitBulkRunRequest(msrest.serialization.Model):
"""SubmitBulkRunRequest.
:ivar flow_definition_file_path:
:vartype flow_definition_file_path: str
:ivar flow_definition_resource_id:
:vartype flow_definition_resource_id: str
:ivar flow_definition_data_store_name:
:vartype flow_definition_data_store_name: str
:ivar flow_definition_blob_path:
:vartype flow_definition_blob_path: str
:ivar flow_definition_data_uri:
:vartype flow_definition_data_uri: str
:ivar run_id:
:vartype run_id: str
:ivar run_display_name:
:vartype run_display_name: str
:ivar run_experiment_name:
:vartype run_experiment_name: str
:ivar node_variant:
:vartype node_variant: str
:ivar variant_run_id:
:vartype variant_run_id: str
:ivar baseline_run_id:
:vartype baseline_run_id: str
:ivar session_id:
:vartype session_id: str
:ivar session_setup_mode: Possible values include: "ClientWait", "SystemWait".
:vartype session_setup_mode: str or ~flow.models.SessionSetupModeEnum
:ivar session_config_mode: Possible values include: "Default", "ForceInstallPackage",
"ForceReset".
:vartype session_config_mode: str or ~flow.models.SessionConfigModeEnum
:ivar flow_lineage_id:
:vartype flow_lineage_id: str
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar identity:
:vartype identity: str
:ivar compute_name:
:vartype compute_name: str
:ivar flow_run_display_name:
:vartype flow_run_display_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar runtime_name:
:vartype runtime_name: str
:ivar batch_data_input:
:vartype batch_data_input: ~flow.models.BatchDataInput
:ivar inputs_mapping: This is a dictionary.
:vartype inputs_mapping: dict[str, str]
:ivar connections: This is a dictionary.
:vartype connections: dict[str, dict[str, str]]
:ivar environment_variables: This is a dictionary.
:vartype environment_variables: dict[str, str]
:ivar output_data_store:
:vartype output_data_store: str
:ivar run_display_name_generation_type: Possible values include: "AutoAppend",
"UserProvidedMacro".
:vartype run_display_name_generation_type: str or ~flow.models.RunDisplayNameGenerationType
:ivar aml_compute_name:
:vartype aml_compute_name: str
:ivar worker_count:
:vartype worker_count: int
:ivar timeout_in_seconds:
:vartype timeout_in_seconds: int
:ivar promptflow_engine_type: Possible values include: "FastEngine", "ScalableEngine".
:vartype promptflow_engine_type: str or ~flow.models.PromptflowEngineType
"""
_attribute_map = {
'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
'flow_definition_resource_id': {'key': 'flowDefinitionResourceId', 'type': 'str'},
'flow_definition_data_store_name': {'key': 'flowDefinitionDataStoreName', 'type': 'str'},
'flow_definition_blob_path': {'key': 'flowDefinitionBlobPath', 'type': 'str'},
'flow_definition_data_uri': {'key': 'flowDefinitionDataUri', 'type': 'str'},
'run_id': {'key': 'runId', 'type': 'str'},
'run_display_name': {'key': 'runDisplayName', 'type': 'str'},
'run_experiment_name': {'key': 'runExperimentName', 'type': 'str'},
'node_variant': {'key': 'nodeVariant', 'type': 'str'},
'variant_run_id': {'key': 'variantRunId', 'type': 'str'},
'baseline_run_id': {'key': 'baselineRunId', 'type': 'str'},
'session_id': {'key': 'sessionId', 'type': 'str'},
'session_setup_mode': {'key': 'sessionSetupMode', 'type': 'str'},
'session_config_mode': {'key': 'sessionConfigMode', 'type': 'str'},
'flow_lineage_id': {'key': 'flowLineageId', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'identity': {'key': 'identity', 'type': 'str'},
'compute_name': {'key': 'computeName', 'type': 'str'},
'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'},
'connections': {'key': 'connections', 'type': '{{str}}'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'output_data_store': {'key': 'outputDataStore', 'type': 'str'},
'run_display_name_generation_type': {'key': 'runDisplayNameGenerationType', 'type': 'str'},
'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'},
'worker_count': {'key': 'workerCount', 'type': 'int'},
'timeout_in_seconds': {'key': 'timeoutInSeconds', 'type': 'int'},
'promptflow_engine_type': {'key': 'promptflowEngineType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_definition_file_path:
:paramtype flow_definition_file_path: str
:keyword flow_definition_resource_id:
:paramtype flow_definition_resource_id: str
:keyword flow_definition_data_store_name:
:paramtype flow_definition_data_store_name: str
:keyword flow_definition_blob_path:
:paramtype flow_definition_blob_path: str
:keyword flow_definition_data_uri:
:paramtype flow_definition_data_uri: str
:keyword run_id:
:paramtype run_id: str
:keyword run_display_name:
:paramtype run_display_name: str
:keyword run_experiment_name:
:paramtype run_experiment_name: str
:keyword node_variant:
:paramtype node_variant: str
:keyword variant_run_id:
:paramtype variant_run_id: str
:keyword baseline_run_id:
:paramtype baseline_run_id: str
:keyword session_id:
:paramtype session_id: str
:keyword session_setup_mode: Possible values include: "ClientWait", "SystemWait".
:paramtype session_setup_mode: str or ~flow.models.SessionSetupModeEnum
:keyword session_config_mode: Possible values include: "Default", "ForceInstallPackage",
"ForceReset".
:paramtype session_config_mode: str or ~flow.models.SessionConfigModeEnum
:keyword flow_lineage_id:
:paramtype flow_lineage_id: str
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword identity:
:paramtype identity: str
:keyword compute_name:
:paramtype compute_name: str
:keyword flow_run_display_name:
:paramtype flow_run_display_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword runtime_name:
:paramtype runtime_name: str
:keyword batch_data_input:
:paramtype batch_data_input: ~flow.models.BatchDataInput
:keyword inputs_mapping: This is a dictionary.
:paramtype inputs_mapping: dict[str, str]
:keyword connections: This is a dictionary.
:paramtype connections: dict[str, dict[str, str]]
:keyword environment_variables: This is a dictionary.
:paramtype environment_variables: dict[str, str]
:keyword output_data_store:
:paramtype output_data_store: str
:keyword run_display_name_generation_type: Possible values include: "AutoAppend",
"UserProvidedMacro".
:paramtype run_display_name_generation_type: str or ~flow.models.RunDisplayNameGenerationType
:keyword aml_compute_name:
:paramtype aml_compute_name: str
:keyword worker_count:
:paramtype worker_count: int
:keyword timeout_in_seconds:
:paramtype timeout_in_seconds: int
:keyword promptflow_engine_type: Possible values include: "FastEngine", "ScalableEngine".
:paramtype promptflow_engine_type: str or ~flow.models.PromptflowEngineType
"""
super(SubmitBulkRunRequest, self).__init__(**kwargs)
self.flow_definition_file_path = kwargs.get('flow_definition_file_path', None)
self.flow_definition_resource_id = kwargs.get('flow_definition_resource_id', None)
self.flow_definition_data_store_name = kwargs.get('flow_definition_data_store_name', None)
self.flow_definition_blob_path = kwargs.get('flow_definition_blob_path', None)
self.flow_definition_data_uri = kwargs.get('flow_definition_data_uri', None)
self.run_id = kwargs.get('run_id', None)
self.run_display_name = kwargs.get('run_display_name', None)
self.run_experiment_name = kwargs.get('run_experiment_name', None)
self.node_variant = kwargs.get('node_variant', None)
self.variant_run_id = kwargs.get('variant_run_id', None)
self.baseline_run_id = kwargs.get('baseline_run_id', None)
self.session_id = kwargs.get('session_id', None)
self.session_setup_mode = kwargs.get('session_setup_mode', None)
self.session_config_mode = kwargs.get('session_config_mode', None)
self.flow_lineage_id = kwargs.get('flow_lineage_id', None)
self.vm_size = kwargs.get('vm_size', None)
self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
self.identity = kwargs.get('identity', None)
self.compute_name = kwargs.get('compute_name', None)
self.flow_run_display_name = kwargs.get('flow_run_display_name', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
self.runtime_name = kwargs.get('runtime_name', None)
self.batch_data_input = kwargs.get('batch_data_input', None)
self.inputs_mapping = kwargs.get('inputs_mapping', None)
self.connections = kwargs.get('connections', None)
self.environment_variables = kwargs.get('environment_variables', None)
self.output_data_store = kwargs.get('output_data_store', None)
self.run_display_name_generation_type = kwargs.get('run_display_name_generation_type', None)
self.aml_compute_name = kwargs.get('aml_compute_name', None)
self.worker_count = kwargs.get('worker_count', None)
self.timeout_in_seconds = kwargs.get('timeout_in_seconds', None)
self.promptflow_engine_type = kwargs.get('promptflow_engine_type', None)
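

# SubmitBulkRunRequest is the largest request model in this group; in practice only a
# small subset of fields is populated per submission. Note the nested '{{str}}' type
# for connections: a dictionary of dictionaries of strings. Everything below is an
# illustrative placeholder, not a documented minimal payload:
def _example_submit_bulk_run_request():
    request = SubmitBulkRunRequest(
        flow_definition_data_uri='azureml://datastores/workspaceblobstore/paths/flow',
        run_id='bulk-run-001',
        runtime_name='example-runtime',
        inputs_mapping={'question': '${data.question}'},
        connections={'llm_node': {'connection': 'open_ai_connection'}},
        environment_variables={'PF_WORKER_COUNT': '4'},
    )
    body = request.serialize()
    assert body['connections'] == {'llm_node': {'connection': 'open_ai_connection'}}
    return body
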
class SubmitBulkRunResponse(msrest.serialization.Model):
"""SubmitBulkRunResponse.
:ivar next_action_interval_in_seconds:
:vartype next_action_interval_in_seconds: int
:ivar action_type: Possible values include: "SendValidationRequest", "GetValidationStatus",
"SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent", "SubmitFlowRun".
:vartype action_type: str or ~flow.models.ActionType
:ivar flow_runs:
:vartype flow_runs: list[any]
:ivar node_runs:
:vartype node_runs: list[any]
:ivar error_response: The error response.
:vartype error_response: ~flow.models.ErrorResponse
:ivar flow_name:
:vartype flow_name: str
:ivar flow_run_display_name:
:vartype flow_run_display_name: str
:ivar flow_run_id:
:vartype flow_run_id: str
:ivar flow_graph:
:vartype flow_graph: ~flow.models.FlowGraph
:ivar flow_graph_layout:
:vartype flow_graph_layout: ~flow.models.FlowGraphLayout
:ivar flow_run_resource_id:
:vartype flow_run_resource_id: str
:ivar bulk_test_id:
:vartype bulk_test_id: str
:ivar batch_inputs:
:vartype batch_inputs: list[dict[str, any]]
:ivar batch_data_input:
:vartype batch_data_input: ~flow.models.BatchDataInput
:ivar created_by:
:vartype created_by: ~flow.models.SchemaContractsCreatedBy
:ivar created_on:
:vartype created_on: ~datetime.datetime
:ivar flow_run_type: Possible values include: "FlowRun", "EvaluationRun",
"PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun".
:vartype flow_run_type: str or ~flow.models.FlowRunTypeEnum
:ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType
:ivar runtime_name:
:vartype runtime_name: str
:ivar aml_compute_name:
:vartype aml_compute_name: str
:ivar flow_run_logs: Dictionary of :code:`<string>`.
:vartype flow_run_logs: dict[str, str]
:ivar flow_test_mode: Possible values include: "Sync", "Async".
:vartype flow_test_mode: str or ~flow.models.FlowTestMode
:ivar flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
:vartype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
:ivar working_directory:
:vartype working_directory: str
:ivar flow_dag_file_relative_path:
:vartype flow_dag_file_relative_path: str
:ivar flow_snapshot_id:
:vartype flow_snapshot_id: str
:ivar variant_run_to_evaluation_runs_id_mapping: Dictionary of :code:`<list[str]>`.
:vartype variant_run_to_evaluation_runs_id_mapping: dict[str, list[str]]
"""
_attribute_map = {
'next_action_interval_in_seconds': {'key': 'nextActionIntervalInSeconds', 'type': 'int'},
'action_type': {'key': 'actionType', 'type': 'str'},
'flow_runs': {'key': 'flow_runs', 'type': '[object]'},
'node_runs': {'key': 'node_runs', 'type': '[object]'},
'error_response': {'key': 'errorResponse', 'type': 'ErrorResponse'},
'flow_name': {'key': 'flowName', 'type': 'str'},
'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'},
'flow_run_id': {'key': 'flowRunId', 'type': 'str'},
'flow_graph': {'key': 'flowGraph', 'type': 'FlowGraph'},
'flow_graph_layout': {'key': 'flowGraphLayout', 'type': 'FlowGraphLayout'},
'flow_run_resource_id': {'key': 'flowRunResourceId', 'type': 'str'},
'bulk_test_id': {'key': 'bulkTestId', 'type': 'str'},
'batch_inputs': {'key': 'batchInputs', 'type': '[{object}]'},
'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
'created_by': {'key': 'createdBy', 'type': 'SchemaContractsCreatedBy'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'flow_run_type': {'key': 'flowRunType', 'type': 'str'},
'flow_type': {'key': 'flowType', 'type': 'str'},
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'},
'flow_run_logs': {'key': 'flowRunLogs', 'type': '{str}'},
'flow_test_mode': {'key': 'flowTestMode', 'type': 'str'},
'flow_test_infos': {'key': 'flowTestInfos', 'type': '{FlowTestInfo}'},
'working_directory': {'key': 'workingDirectory', 'type': 'str'},
'flow_dag_file_relative_path': {'key': 'flowDagFileRelativePath', 'type': 'str'},
'flow_snapshot_id': {'key': 'flowSnapshotId', 'type': 'str'},
'variant_run_to_evaluation_runs_id_mapping': {'key': 'variantRunToEvaluationRunsIdMapping', 'type': '{[str]}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword next_action_interval_in_seconds:
:paramtype next_action_interval_in_seconds: int
:keyword action_type: Possible values include: "SendValidationRequest", "GetValidationStatus",
"SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent", "SubmitFlowRun".
:paramtype action_type: str or ~flow.models.ActionType
:keyword flow_runs:
:paramtype flow_runs: list[any]
:keyword node_runs:
:paramtype node_runs: list[any]
:keyword error_response: The error response.
:paramtype error_response: ~flow.models.ErrorResponse
:keyword flow_name:
:paramtype flow_name: str
:keyword flow_run_display_name:
:paramtype flow_run_display_name: str
:keyword flow_run_id:
:paramtype flow_run_id: str
:keyword flow_graph:
:paramtype flow_graph: ~flow.models.FlowGraph
:keyword flow_graph_layout:
:paramtype flow_graph_layout: ~flow.models.FlowGraphLayout
:keyword flow_run_resource_id:
:paramtype flow_run_resource_id: str
:keyword bulk_test_id:
:paramtype bulk_test_id: str
:keyword batch_inputs:
:paramtype batch_inputs: list[dict[str, any]]
:keyword batch_data_input:
:paramtype batch_data_input: ~flow.models.BatchDataInput
:keyword created_by:
:paramtype created_by: ~flow.models.SchemaContractsCreatedBy
:keyword created_on:
:paramtype created_on: ~datetime.datetime
:keyword flow_run_type: Possible values include: "FlowRun", "EvaluationRun",
"PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun".
:paramtype flow_run_type: str or ~flow.models.FlowRunTypeEnum
:keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:paramtype flow_type: str or ~flow.models.FlowType
:keyword runtime_name:
:paramtype runtime_name: str
:keyword aml_compute_name:
:paramtype aml_compute_name: str
:keyword flow_run_logs: Dictionary of :code:`<string>`.
:paramtype flow_run_logs: dict[str, str]
:keyword flow_test_mode: Possible values include: "Sync", "Async".
:paramtype flow_test_mode: str or ~flow.models.FlowTestMode
:keyword flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
:paramtype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
:keyword working_directory:
:paramtype working_directory: str
:keyword flow_dag_file_relative_path:
:paramtype flow_dag_file_relative_path: str
:keyword flow_snapshot_id:
:paramtype flow_snapshot_id: str
:keyword variant_run_to_evaluation_runs_id_mapping: Dictionary of :code:`<list[str]>`.
:paramtype variant_run_to_evaluation_runs_id_mapping: dict[str, list[str]]
"""
super(SubmitBulkRunResponse, self).__init__(**kwargs)
self.next_action_interval_in_seconds = kwargs.get('next_action_interval_in_seconds', None)
self.action_type = kwargs.get('action_type', None)
self.flow_runs = kwargs.get('flow_runs', None)
self.node_runs = kwargs.get('node_runs', None)
self.error_response = kwargs.get('error_response', None)
self.flow_name = kwargs.get('flow_name', None)
self.flow_run_display_name = kwargs.get('flow_run_display_name', None)
self.flow_run_id = kwargs.get('flow_run_id', None)
self.flow_graph = kwargs.get('flow_graph', None)
self.flow_graph_layout = kwargs.get('flow_graph_layout', None)
self.flow_run_resource_id = kwargs.get('flow_run_resource_id', None)
self.bulk_test_id = kwargs.get('bulk_test_id', None)
self.batch_inputs = kwargs.get('batch_inputs', None)
self.batch_data_input = kwargs.get('batch_data_input', None)
self.created_by = kwargs.get('created_by', None)
self.created_on = kwargs.get('created_on', None)
self.flow_run_type = kwargs.get('flow_run_type', None)
self.flow_type = kwargs.get('flow_type', None)
self.runtime_name = kwargs.get('runtime_name', None)
self.aml_compute_name = kwargs.get('aml_compute_name', None)
self.flow_run_logs = kwargs.get('flow_run_logs', None)
self.flow_test_mode = kwargs.get('flow_test_mode', None)
self.flow_test_infos = kwargs.get('flow_test_infos', None)
self.working_directory = kwargs.get('working_directory', None)
self.flow_dag_file_relative_path = kwargs.get('flow_dag_file_relative_path', None)
self.flow_snapshot_id = kwargs.get('flow_snapshot_id', None)
self.variant_run_to_evaluation_runs_id_mapping = kwargs.get('variant_run_to_evaluation_runs_id_mapping', None)
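
# Illustrative usage (editor's note, not generated code): a sketch of turning a
# raw service payload back into a model via msrest's ``Model.from_dict``. The
# payload keys and values below are hypothetical.
#
#     raw = {"flowRunId": "run-123", "flowType": "Default"}
#     response = SubmitBulkRunResponse.from_dict(raw)
#     assert response.flow_run_id == "run-123"
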
class SubmitFlowRequest(msrest.serialization.Model):
"""SubmitFlowRequest.
:ivar flow_run_id:
:vartype flow_run_id: str
:ivar flow_run_display_name:
:vartype flow_run_display_name: str
:ivar flow_id:
:vartype flow_id: str
:ivar flow:
:vartype flow: ~flow.models.Flow
:ivar flow_submit_run_settings:
:vartype flow_submit_run_settings: ~flow.models.FlowSubmitRunSettings
:ivar async_submission:
:vartype async_submission: bool
:ivar use_workspace_connection:
:vartype use_workspace_connection: bool
:ivar enable_async_flow_test:
:vartype enable_async_flow_test: bool
:ivar run_display_name_generation_type: Possible values include: "AutoAppend",
"UserProvidedMacro".
:vartype run_display_name_generation_type: str or ~flow.models.RunDisplayNameGenerationType
"""
_attribute_map = {
'flow_run_id': {'key': 'flowRunId', 'type': 'str'},
'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'},
'flow_id': {'key': 'flowId', 'type': 'str'},
'flow': {'key': 'flow', 'type': 'Flow'},
'flow_submit_run_settings': {'key': 'flowSubmitRunSettings', 'type': 'FlowSubmitRunSettings'},
'async_submission': {'key': 'asyncSubmission', 'type': 'bool'},
'use_workspace_connection': {'key': 'useWorkspaceConnection', 'type': 'bool'},
'enable_async_flow_test': {'key': 'enableAsyncFlowTest', 'type': 'bool'},
'run_display_name_generation_type': {'key': 'runDisplayNameGenerationType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_run_id:
:paramtype flow_run_id: str
:keyword flow_run_display_name:
:paramtype flow_run_display_name: str
:keyword flow_id:
:paramtype flow_id: str
:keyword flow:
:paramtype flow: ~flow.models.Flow
:keyword flow_submit_run_settings:
:paramtype flow_submit_run_settings: ~flow.models.FlowSubmitRunSettings
:keyword async_submission:
:paramtype async_submission: bool
:keyword use_workspace_connection:
:paramtype use_workspace_connection: bool
:keyword enable_async_flow_test:
:paramtype enable_async_flow_test: bool
:keyword run_display_name_generation_type: Possible values include: "AutoAppend",
"UserProvidedMacro".
:paramtype run_display_name_generation_type: str or ~flow.models.RunDisplayNameGenerationType
"""
super(SubmitFlowRequest, self).__init__(**kwargs)
self.flow_run_id = kwargs.get('flow_run_id', None)
self.flow_run_display_name = kwargs.get('flow_run_display_name', None)
self.flow_id = kwargs.get('flow_id', None)
self.flow = kwargs.get('flow', None)
self.flow_submit_run_settings = kwargs.get('flow_submit_run_settings', None)
self.async_submission = kwargs.get('async_submission', None)
self.use_workspace_connection = kwargs.get('use_workspace_connection', None)
self.enable_async_flow_test = kwargs.get('enable_async_flow_test', None)
self.run_display_name_generation_type = kwargs.get('run_display_name_generation_type', None)
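
# Illustrative usage (editor's note, not generated code): a minimal flow
# submission using only scalar fields; ``flow`` and ``flow_submit_run_settings``
# would normally carry nested models. Values are hypothetical.
#
#     submit = SubmitFlowRequest(
#         flow_run_id="flow-run-001",
#         async_submission=True,
#         run_display_name_generation_type="AutoAppend",
#     )
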
class SubmitPipelineRunRequest(msrest.serialization.Model):
"""SubmitPipelineRunRequest.
:ivar compute_target:
:vartype compute_target: str
:ivar flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
:vartype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
:ivar step_tags: This is a dictionary.
:vartype step_tags: dict[str, str]
:ivar experiment_name:
:vartype experiment_name: str
:ivar pipeline_parameters: This is a dictionary.
:vartype pipeline_parameters: dict[str, str]
:ivar data_path_assignments: This is a dictionary.
:vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:ivar data_set_definition_value_assignments: This is a dictionary.
:vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue]
:ivar asset_output_settings_assignments: This is a dictionary.
:vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:ivar enable_notification:
:vartype enable_notification: bool
:ivar sub_pipelines_info:
:vartype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:ivar display_name:
:vartype display_name: str
:ivar run_id:
:vartype run_id: str
:ivar parent_run_id:
:vartype parent_run_id: str
:ivar graph:
:vartype graph: ~flow.models.GraphDraftEntity
:ivar pipeline_run_settings:
:vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:ivar module_node_run_settings:
:vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:ivar module_node_ui_input_settings:
:vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar continue_run_on_step_failure:
:vartype continue_run_on_step_failure: bool
:ivar description:
:vartype description: str
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar enforce_rerun:
:vartype enforce_rerun: bool
:ivar dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2",
"DatasetInDesignerUI", "AssetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI",
"AssetInDpv2WithDatasetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI",
"DatasetAndAssetInDpv2WithDatasetInDesignerUI", "Dataset", "Asset".
:vartype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
_attribute_map = {
'compute_target': {'key': 'computeTarget', 'type': 'str'},
'flattened_sub_graphs': {'key': 'flattenedSubGraphs', 'type': '{PipelineSubDraft}'},
'step_tags': {'key': 'stepTags', 'type': '{str}'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'},
'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'},
'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
'enable_notification': {'key': 'enableNotification', 'type': 'bool'},
'sub_pipelines_info': {'key': 'subPipelinesInfo', 'type': 'SubPipelinesInfo'},
'display_name': {'key': 'displayName', 'type': 'str'},
'run_id': {'key': 'runId', 'type': 'str'},
'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
'graph': {'key': 'graph', 'type': 'GraphDraftEntity'},
'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'},
'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'},
'tags': {'key': 'tags', 'type': '{str}'},
'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
'dataset_access_modes': {'key': 'datasetAccessModes', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword compute_target:
:paramtype compute_target: str
:keyword flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
:paramtype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
:keyword step_tags: This is a dictionary.
:paramtype step_tags: dict[str, str]
:keyword experiment_name:
:paramtype experiment_name: str
:keyword pipeline_parameters: This is a dictionary.
:paramtype pipeline_parameters: dict[str, str]
:keyword data_path_assignments: This is a dictionary.
:paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:keyword data_set_definition_value_assignments: This is a dictionary.
:paramtype data_set_definition_value_assignments: dict[str,
~flow.models.DataSetDefinitionValue]
:keyword asset_output_settings_assignments: This is a dictionary.
:paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:keyword enable_notification:
:paramtype enable_notification: bool
:keyword sub_pipelines_info:
:paramtype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:keyword display_name:
:paramtype display_name: str
:keyword run_id:
:paramtype run_id: str
:keyword parent_run_id:
:paramtype parent_run_id: str
:keyword graph:
:paramtype graph: ~flow.models.GraphDraftEntity
:keyword pipeline_run_settings:
:paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:keyword module_node_run_settings:
:paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:keyword module_node_ui_input_settings:
:paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword continue_run_on_step_failure:
:paramtype continue_run_on_step_failure: bool
:keyword description:
:paramtype description: str
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword enforce_rerun:
:paramtype enforce_rerun: bool
:keyword dataset_access_modes: Possible values include: "Default", "DatasetInDpv2",
"AssetInDpv2", "DatasetInDesignerUI", "AssetInDesignerUI",
"DatasetInDpv2WithDatasetInDesignerUI", "AssetInDpv2WithDatasetInDesignerUI",
"AssetInDpv2WithAssetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI", "Dataset",
"Asset".
:paramtype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
super(SubmitPipelineRunRequest, self).__init__(**kwargs)
self.compute_target = kwargs.get('compute_target', None)
self.flattened_sub_graphs = kwargs.get('flattened_sub_graphs', None)
self.step_tags = kwargs.get('step_tags', None)
self.experiment_name = kwargs.get('experiment_name', None)
self.pipeline_parameters = kwargs.get('pipeline_parameters', None)
self.data_path_assignments = kwargs.get('data_path_assignments', None)
self.data_set_definition_value_assignments = kwargs.get('data_set_definition_value_assignments', None)
self.asset_output_settings_assignments = kwargs.get('asset_output_settings_assignments', None)
self.enable_notification = kwargs.get('enable_notification', None)
self.sub_pipelines_info = kwargs.get('sub_pipelines_info', None)
self.display_name = kwargs.get('display_name', None)
self.run_id = kwargs.get('run_id', None)
self.parent_run_id = kwargs.get('parent_run_id', None)
self.graph = kwargs.get('graph', None)
self.pipeline_run_settings = kwargs.get('pipeline_run_settings', None)
self.module_node_run_settings = kwargs.get('module_node_run_settings', None)
self.module_node_ui_input_settings = kwargs.get('module_node_ui_input_settings', None)
self.tags = kwargs.get('tags', None)
self.continue_run_on_step_failure = kwargs.get('continue_run_on_step_failure', None)
self.description = kwargs.get('description', None)
self.properties = kwargs.get('properties', None)
self.enforce_rerun = kwargs.get('enforce_rerun', None)
self.dataset_access_modes = kwargs.get('dataset_access_modes', None)
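
# Illustrative usage (editor's note, not generated code): a pipeline-run sketch
# showing the dictionary-valued fields. All names and values are hypothetical.
#
#     pipeline_request = SubmitPipelineRunRequest(
#         experiment_name="my-experiment",
#         compute_target="cpu-cluster",
#         pipeline_parameters={"learning_rate": "0.01"},
#         tags={"owner": "data-team"},
#         continue_run_on_step_failure=False,
#     )
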
class SubPipelineDefinition(msrest.serialization.Model):
"""SubPipelineDefinition.
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar default_compute_target:
:vartype default_compute_target: ~flow.models.ComputeSetting
:ivar default_data_store:
:vartype default_data_store: ~flow.models.DatastoreSetting
:ivar pipeline_function_name:
:vartype pipeline_function_name: str
:ivar id:
:vartype id: str
:ivar parent_definition_id:
:vartype parent_definition_id: str
:ivar from_module_name:
:vartype from_module_name: str
:ivar parameter_list:
:vartype parameter_list: list[~flow.models.Kwarg]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'default_compute_target': {'key': 'defaultComputeTarget', 'type': 'ComputeSetting'},
'default_data_store': {'key': 'defaultDataStore', 'type': 'DatastoreSetting'},
'pipeline_function_name': {'key': 'pipelineFunctionName', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'parent_definition_id': {'key': 'parentDefinitionId', 'type': 'str'},
'from_module_name': {'key': 'fromModuleName', 'type': 'str'},
'parameter_list': {'key': 'parameterList', 'type': '[Kwarg]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword default_compute_target:
:paramtype default_compute_target: ~flow.models.ComputeSetting
:keyword default_data_store:
:paramtype default_data_store: ~flow.models.DatastoreSetting
:keyword pipeline_function_name:
:paramtype pipeline_function_name: str
:keyword id:
:paramtype id: str
:keyword parent_definition_id:
:paramtype parent_definition_id: str
:keyword from_module_name:
:paramtype from_module_name: str
:keyword parameter_list:
:paramtype parameter_list: list[~flow.models.Kwarg]
"""
super(SubPipelineDefinition, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.default_compute_target = kwargs.get('default_compute_target', None)
self.default_data_store = kwargs.get('default_data_store', None)
self.pipeline_function_name = kwargs.get('pipeline_function_name', None)
self.id = kwargs.get('id', None)
self.parent_definition_id = kwargs.get('parent_definition_id', None)
self.from_module_name = kwargs.get('from_module_name', None)
self.parameter_list = kwargs.get('parameter_list', None)
class SubPipelineParameterAssignment(msrest.serialization.Model):
"""SubPipelineParameterAssignment.
:ivar node_id:
:vartype node_id: str
:ivar parameter_name:
:vartype parameter_name: str
"""
_attribute_map = {
'node_id': {'key': 'nodeId', 'type': 'str'},
'parameter_name': {'key': 'parameterName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword node_id:
:paramtype node_id: str
:keyword parameter_name:
:paramtype parameter_name: str
"""
super(SubPipelineParameterAssignment, self).__init__(**kwargs)
self.node_id = kwargs.get('node_id', None)
self.parameter_name = kwargs.get('parameter_name', None)
class SubPipelinesInfo(msrest.serialization.Model):
"""SubPipelinesInfo.
:ivar sub_graph_info:
:vartype sub_graph_info: list[~flow.models.SubGraphInfo]
:ivar node_id_to_sub_graph_id_mapping: Dictionary of :code:`<string>`.
:vartype node_id_to_sub_graph_id_mapping: dict[str, str]
:ivar sub_pipeline_definition:
:vartype sub_pipeline_definition: list[~flow.models.SubPipelineDefinition]
"""
_attribute_map = {
'sub_graph_info': {'key': 'subGraphInfo', 'type': '[SubGraphInfo]'},
'node_id_to_sub_graph_id_mapping': {'key': 'nodeIdToSubGraphIdMapping', 'type': '{str}'},
'sub_pipeline_definition': {'key': 'subPipelineDefinition', 'type': '[SubPipelineDefinition]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword sub_graph_info:
:paramtype sub_graph_info: list[~flow.models.SubGraphInfo]
:keyword node_id_to_sub_graph_id_mapping: Dictionary of :code:`<string>`.
:paramtype node_id_to_sub_graph_id_mapping: dict[str, str]
:keyword sub_pipeline_definition:
:paramtype sub_pipeline_definition: list[~flow.models.SubPipelineDefinition]
"""
super(SubPipelinesInfo, self).__init__(**kwargs)
self.sub_graph_info = kwargs.get('sub_graph_info', None)
self.node_id_to_sub_graph_id_mapping = kwargs.get('node_id_to_sub_graph_id_mapping', None)
self.sub_pipeline_definition = kwargs.get('sub_pipeline_definition', None)
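
# Illustrative usage (editor's note, not generated code): a sketch wiring a
# SubPipelineDefinition into SubPipelinesInfo; the IDs below are hypothetical.
#
#     definition = SubPipelineDefinition(
#         name="preprocess",
#         id="def-1",
#         pipeline_function_name="preprocess_pipeline",
#     )
#     info = SubPipelinesInfo(
#         sub_pipeline_definition=[definition],
#         node_id_to_sub_graph_id_mapping={"node-1": "graph-1"},
#     )
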
class SubStatusPeriod(msrest.serialization.Model):
"""SubStatusPeriod.
:ivar name:
:vartype name: str
:ivar sub_periods:
:vartype sub_periods: list[~flow.models.SubStatusPeriod]
:ivar start:
:vartype start: long
:ivar end:
:vartype end: long
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'sub_periods': {'key': 'subPeriods', 'type': '[SubStatusPeriod]'},
'start': {'key': 'start', 'type': 'long'},
'end': {'key': 'end', 'type': 'long'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword sub_periods:
:paramtype sub_periods: list[~flow.models.SubStatusPeriod]
:keyword start:
:paramtype start: long
:keyword end:
:paramtype end: long
"""
super(SubStatusPeriod, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.sub_periods = kwargs.get('sub_periods', None)
self.start = kwargs.get('start', None)
self.end = kwargs.get('end', None)
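
# Illustrative usage (editor's note, not generated code): sub-periods nest
# recursively, and ``start``/``end`` are declared as ``long``, so plain Python
# ints work. The timestamps below are hypothetical epoch values (assumption).
#
#     period = SubStatusPeriod(
#         name="setup",
#         start=1700000000000,
#         end=1700000005000,
#         sub_periods=[
#             SubStatusPeriod(name="pull-image", start=1700000000000,
#                             end=1700000002000),
#         ],
#     )
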
class SweepEarlyTerminationPolicy(msrest.serialization.Model):
"""SweepEarlyTerminationPolicy.
:ivar policy_type: Possible values include: "Bandit", "MedianStopping", "TruncationSelection".
:vartype policy_type: str or ~flow.models.EarlyTerminationPolicyType
:ivar evaluation_interval:
:vartype evaluation_interval: int
:ivar delay_evaluation:
:vartype delay_evaluation: int
:ivar slack_factor:
:vartype slack_factor: float
:ivar slack_amount:
:vartype slack_amount: float
:ivar truncation_percentage:
:vartype truncation_percentage: int
"""
_attribute_map = {
'policy_type': {'key': 'policyType', 'type': 'str'},
'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'},
'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'},
'slack_factor': {'key': 'slackFactor', 'type': 'float'},
'slack_amount': {'key': 'slackAmount', 'type': 'float'},
'truncation_percentage': {'key': 'truncationPercentage', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword policy_type: Possible values include: "Bandit", "MedianStopping",
"TruncationSelection".
:paramtype policy_type: str or ~flow.models.EarlyTerminationPolicyType
:keyword evaluation_interval:
:paramtype evaluation_interval: int
:keyword delay_evaluation:
:paramtype delay_evaluation: int
:keyword slack_factor:
:paramtype slack_factor: float
:keyword slack_amount:
:paramtype slack_amount: float
:keyword truncation_percentage:
:paramtype truncation_percentage: int
"""
super(SweepEarlyTerminationPolicy, self).__init__(**kwargs)
self.policy_type = kwargs.get('policy_type', None)
self.evaluation_interval = kwargs.get('evaluation_interval', None)
self.delay_evaluation = kwargs.get('delay_evaluation', None)
self.slack_factor = kwargs.get('slack_factor', None)
self.slack_amount = kwargs.get('slack_amount', None)
self.truncation_percentage = kwargs.get('truncation_percentage', None)
class SweepSettings(msrest.serialization.Model):
"""SweepSettings.
:ivar limits:
:vartype limits: ~flow.models.SweepSettingsLimits
:ivar search_space:
:vartype search_space: list[dict[str, str]]
:ivar sampling_algorithm: Possible values include: "Random", "Grid", "Bayesian".
:vartype sampling_algorithm: str or ~flow.models.SamplingAlgorithmType
:ivar early_termination:
:vartype early_termination: ~flow.models.SweepEarlyTerminationPolicy
"""
_attribute_map = {
'limits': {'key': 'limits', 'type': 'SweepSettingsLimits'},
'search_space': {'key': 'searchSpace', 'type': '[{str}]'},
'sampling_algorithm': {'key': 'samplingAlgorithm', 'type': 'str'},
'early_termination': {'key': 'earlyTermination', 'type': 'SweepEarlyTerminationPolicy'},
}
def __init__(
self,
**kwargs
):
"""
:keyword limits:
:paramtype limits: ~flow.models.SweepSettingsLimits
:keyword search_space:
:paramtype search_space: list[dict[str, str]]
:keyword sampling_algorithm: Possible values include: "Random", "Grid", "Bayesian".
:paramtype sampling_algorithm: str or ~flow.models.SamplingAlgorithmType
:keyword early_termination:
:paramtype early_termination: ~flow.models.SweepEarlyTerminationPolicy
"""
super(SweepSettings, self).__init__(**kwargs)
self.limits = kwargs.get('limits', None)
self.search_space = kwargs.get('search_space', None)
self.sampling_algorithm = kwargs.get('sampling_algorithm', None)
self.early_termination = kwargs.get('early_termination', None)
class SweepSettingsLimits(msrest.serialization.Model):
"""SweepSettingsLimits.
:ivar max_total_trials:
:vartype max_total_trials: int
:ivar max_concurrent_trials:
:vartype max_concurrent_trials: int
"""
_attribute_map = {
'max_total_trials': {'key': 'maxTotalTrials', 'type': 'int'},
'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword max_total_trials:
:paramtype max_total_trials: int
:keyword max_concurrent_trials:
:paramtype max_concurrent_trials: int
"""
super(SweepSettingsLimits, self).__init__(**kwargs)
self.max_total_trials = kwargs.get('max_total_trials', None)
self.max_concurrent_trials = kwargs.get('max_concurrent_trials', None)
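
# Illustrative usage (editor's note, not generated code): a sweep configured
# with trial limits and a Bandit early-termination policy. All values below are
# hypothetical.
#
#     sweep = SweepSettings(
#         limits=SweepSettingsLimits(max_total_trials=20, max_concurrent_trials=4),
#         sampling_algorithm="Random",
#         early_termination=SweepEarlyTerminationPolicy(
#             policy_type="Bandit",
#             evaluation_interval=1,
#             slack_factor=0.1,
#         ),
#     )
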
class SystemData(msrest.serialization.Model):
"""SystemData.
:ivar created_at:
:vartype created_at: ~datetime.datetime
:ivar created_by:
:vartype created_by: str
:ivar created_by_type: Possible values include: "User", "Application", "ManagedIdentity",
"Key".
:vartype created_by_type: str or ~flow.models.UserType
:ivar last_modified_at:
:vartype last_modified_at: ~datetime.datetime
:ivar last_modified_by:
:vartype last_modified_by: str
:ivar last_modified_by_type: Possible values include: "User", "Application", "ManagedIdentity",
"Key".
:vartype last_modified_by_type: str or ~flow.models.UserType
"""
_attribute_map = {
'created_at': {'key': 'createdAt', 'type': 'iso-8601'},
'created_by': {'key': 'createdBy', 'type': 'str'},
'created_by_type': {'key': 'createdByType', 'type': 'str'},
'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'},
'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'},
'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword created_at:
:paramtype created_at: ~datetime.datetime
:keyword created_by:
:paramtype created_by: str
:keyword created_by_type: Possible values include: "User", "Application", "ManagedIdentity",
"Key".
:paramtype created_by_type: str or ~flow.models.UserType
:keyword last_modified_at:
:paramtype last_modified_at: ~datetime.datetime
:keyword last_modified_by:
:paramtype last_modified_by: str
:keyword last_modified_by_type: Possible values include: "User", "Application",
"ManagedIdentity", "Key".
:paramtype last_modified_by_type: str or ~flow.models.UserType
"""
super(SystemData, self).__init__(**kwargs)
self.created_at = kwargs.get('created_at', None)
self.created_by = kwargs.get('created_by', None)
self.created_by_type = kwargs.get('created_by_type', None)
self.last_modified_at = kwargs.get('last_modified_at', None)
self.last_modified_by = kwargs.get('last_modified_by', None)
self.last_modified_by_type = kwargs.get('last_modified_by_type', None)
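
# Illustrative usage (editor's note, not generated code): datetime members
# serialize to ISO-8601 strings per the attribute map. Values are hypothetical.
#
#     import datetime
#     meta = SystemData(
#         created_at=datetime.datetime(2024, 1, 1, tzinfo=datetime.timezone.utc),
#         created_by="[email protected]",
#         created_by_type="User",
#     )
#     meta.serialize()  # e.g. {'createdAt': '2024-01-01T00:00:00.000Z', ...}
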
class SystemMeta(msrest.serialization.Model):
"""SystemMeta.
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar extra_hash:
:vartype extra_hash: str
:ivar content_hash:
:vartype content_hash: str
:ivar identifier_hashes:
:vartype identifier_hashes: ~flow.models.SystemMetaIdentifierHashes
:ivar extra_hashes:
:vartype extra_hashes: ~flow.models.SystemMetaExtraHashes
"""
_attribute_map = {
'identifier_hash': {'key': 'identifierHash', 'type': 'str'},
'extra_hash': {'key': 'extraHash', 'type': 'str'},
'content_hash': {'key': 'contentHash', 'type': 'str'},
'identifier_hashes': {'key': 'identifierHashes', 'type': 'SystemMetaIdentifierHashes'},
'extra_hashes': {'key': 'extraHashes', 'type': 'SystemMetaExtraHashes'},
}
def __init__(
self,
**kwargs
):
"""
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword extra_hash:
:paramtype extra_hash: str
:keyword content_hash:
:paramtype content_hash: str
:keyword identifier_hashes:
:paramtype identifier_hashes: ~flow.models.SystemMetaIdentifierHashes
:keyword extra_hashes:
:paramtype extra_hashes: ~flow.models.SystemMetaExtraHashes
"""
super(SystemMeta, self).__init__(**kwargs)
self.identifier_hash = kwargs.get('identifier_hash', None)
self.extra_hash = kwargs.get('extra_hash', None)
self.content_hash = kwargs.get('content_hash', None)
self.identifier_hashes = kwargs.get('identifier_hashes', None)
self.extra_hashes = kwargs.get('extra_hashes', None)
class SystemMetaExtraHashes(msrest.serialization.Model):
"""SystemMetaExtraHashes.
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar identifier_hash_v2:
:vartype identifier_hash_v2: str
"""
_attribute_map = {
'identifier_hash': {'key': 'IdentifierHash', 'type': 'str'},
'identifier_hash_v2': {'key': 'IdentifierHashV2', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword identifier_hash_v2:
:paramtype identifier_hash_v2: str
"""
super(SystemMetaExtraHashes, self).__init__(**kwargs)
self.identifier_hash = kwargs.get('identifier_hash', None)
self.identifier_hash_v2 = kwargs.get('identifier_hash_v2', None)
class SystemMetaIdentifierHashes(msrest.serialization.Model):
"""SystemMetaIdentifierHashes.
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar identifier_hash_v2:
:vartype identifier_hash_v2: str
"""
_attribute_map = {
'identifier_hash': {'key': 'IdentifierHash', 'type': 'str'},
'identifier_hash_v2': {'key': 'IdentifierHashV2', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword identifier_hash_v2:
:paramtype identifier_hash_v2: str
"""
super(SystemMetaIdentifierHashes, self).__init__(**kwargs)
self.identifier_hash = kwargs.get('identifier_hash', None)
self.identifier_hash_v2 = kwargs.get('identifier_hash_v2', None)
class TargetLags(msrest.serialization.Model):
"""TargetLags.
:ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.TargetLagsMode
:ivar values:
:vartype values: list[int]
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'values': {'key': 'values', 'type': '[int]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom".
:paramtype mode: str or ~flow.models.TargetLagsMode
:keyword values:
:paramtype values: list[int]
"""
super(TargetLags, self).__init__(**kwargs)
self.mode = kwargs.get('mode', None)
self.values = kwargs.get('values', None)
class TargetRollingWindowSize(msrest.serialization.Model):
"""TargetRollingWindowSize.
:ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.TargetRollingWindowSizeMode
:ivar value:
:vartype value: int
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'value': {'key': 'value', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom".
:paramtype mode: str or ~flow.models.TargetRollingWindowSizeMode
:keyword value:
:paramtype value: int
"""
super(TargetRollingWindowSize, self).__init__(**kwargs)
self.mode = kwargs.get('mode', None)
self.value = kwargs.get('value', None)
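
# Illustrative usage (editor's note, not generated code): "Auto" mode needs no
# values, while "Custom" supplies them explicitly. The lag and window values
# below are hypothetical.
#
#     auto_lags = TargetLags(mode="Auto")
#     custom_lags = TargetLags(mode="Custom", values=[1, 7, 28])
#     window = TargetRollingWindowSize(mode="Custom", value=14)
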
class TargetSelectorConfiguration(msrest.serialization.Model):
"""TargetSelectorConfiguration.
:ivar low_priority_vm_tolerant:
:vartype low_priority_vm_tolerant: bool
:ivar cluster_block_list:
:vartype cluster_block_list: list[str]
:ivar compute_type:
:vartype compute_type: str
:ivar instance_type:
:vartype instance_type: list[str]
:ivar instance_types:
:vartype instance_types: list[str]
:ivar my_resource_only:
:vartype my_resource_only: bool
:ivar plan_id:
:vartype plan_id: str
:ivar plan_region_id:
:vartype plan_region_id: str
:ivar region:
:vartype region: list[str]
:ivar regions:
:vartype regions: list[str]
:ivar vc_block_list:
:vartype vc_block_list: list[str]
"""
_attribute_map = {
'low_priority_vm_tolerant': {'key': 'lowPriorityVMTolerant', 'type': 'bool'},
'cluster_block_list': {'key': 'clusterBlockList', 'type': '[str]'},
'compute_type': {'key': 'computeType', 'type': 'str'},
'instance_type': {'key': 'instanceType', 'type': '[str]'},
'instance_types': {'key': 'instanceTypes', 'type': '[str]'},
'my_resource_only': {'key': 'myResourceOnly', 'type': 'bool'},
'plan_id': {'key': 'planId', 'type': 'str'},
'plan_region_id': {'key': 'planRegionId', 'type': 'str'},
'region': {'key': 'region', 'type': '[str]'},
'regions': {'key': 'regions', 'type': '[str]'},
'vc_block_list': {'key': 'vcBlockList', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword low_priority_vm_tolerant:
:paramtype low_priority_vm_tolerant: bool
:keyword cluster_block_list:
:paramtype cluster_block_list: list[str]
:keyword compute_type:
:paramtype compute_type: str
:keyword instance_type:
:paramtype instance_type: list[str]
:keyword instance_types:
:paramtype instance_types: list[str]
:keyword my_resource_only:
:paramtype my_resource_only: bool
:keyword plan_id:
:paramtype plan_id: str
:keyword plan_region_id:
:paramtype plan_region_id: str
:keyword region:
:paramtype region: list[str]
:keyword regions:
:paramtype regions: list[str]
:keyword vc_block_list:
:paramtype vc_block_list: list[str]
"""
super(TargetSelectorConfiguration, self).__init__(**kwargs)
self.low_priority_vm_tolerant = kwargs.get('low_priority_vm_tolerant', None)
self.cluster_block_list = kwargs.get('cluster_block_list', None)
self.compute_type = kwargs.get('compute_type', None)
self.instance_type = kwargs.get('instance_type', None)
self.instance_types = kwargs.get('instance_types', None)
self.my_resource_only = kwargs.get('my_resource_only', None)
self.plan_id = kwargs.get('plan_id', None)
self.plan_region_id = kwargs.get('plan_region_id', None)
self.region = kwargs.get('region', None)
self.regions = kwargs.get('regions', None)
self.vc_block_list = kwargs.get('vc_block_list', None)
class Task(msrest.serialization.Model):
"""Task.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id:
:vartype id: int
:ivar exception: Anything.
:vartype exception: any
:ivar status: Possible values include: "Created", "WaitingForActivation", "WaitingToRun",
"Running", "WaitingForChildrenToComplete", "RanToCompletion", "Canceled", "Faulted".
:vartype status: str or ~flow.models.TaskStatus
:ivar is_canceled:
:vartype is_canceled: bool
:ivar is_completed:
:vartype is_completed: bool
:ivar is_completed_successfully:
:vartype is_completed_successfully: bool
:ivar creation_options: Possible values include: "None", "PreferFairness", "LongRunning",
"AttachedToParent", "DenyChildAttach", "HideScheduler", "RunContinuationsAsynchronously".
:vartype creation_options: str or ~flow.models.TaskCreationOptions
:ivar async_state: Anything.
:vartype async_state: any
:ivar is_faulted:
:vartype is_faulted: bool
"""
_validation = {
'id': {'readonly': True},
'exception': {'readonly': True},
'is_canceled': {'readonly': True},
'is_completed': {'readonly': True},
'is_completed_successfully': {'readonly': True},
'async_state': {'readonly': True},
'is_faulted': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'exception': {'key': 'exception', 'type': 'object'},
'status': {'key': 'status', 'type': 'str'},
'is_canceled': {'key': 'isCanceled', 'type': 'bool'},
'is_completed': {'key': 'isCompleted', 'type': 'bool'},
'is_completed_successfully': {'key': 'isCompletedSuccessfully', 'type': 'bool'},
'creation_options': {'key': 'creationOptions', 'type': 'str'},
'async_state': {'key': 'asyncState', 'type': 'object'},
'is_faulted': {'key': 'isFaulted', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword status: Possible values include: "Created", "WaitingForActivation", "WaitingToRun",
"Running", "WaitingForChildrenToComplete", "RanToCompletion", "Canceled", "Faulted".
:paramtype status: str or ~flow.models.TaskStatus
:keyword creation_options: Possible values include: "None", "PreferFairness", "LongRunning",
"AttachedToParent", "DenyChildAttach", "HideScheduler", "RunContinuationsAsynchronously".
:paramtype creation_options: str or ~flow.models.TaskCreationOptions
"""
super(Task, self).__init__(**kwargs)
self.id = None
self.exception = None
self.status = kwargs.get('status', None)
self.is_canceled = None
self.is_completed = None
self.is_completed_successfully = None
self.creation_options = kwargs.get('creation_options', None)
self.async_state = None
self.is_faulted = None
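
# Illustrative usage (editor's note, not generated code): readonly members are
# forced to None in __init__ and, by default, omitted by ``serialize()``; only
# the server populates them.
#
#     t = Task(status="Running", creation_options="LongRunning")
#     assert t.id is None          # readonly, server-populated only
#     body = t.serialize()         # readonly fields are not sent
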
class TaskControlFlowInfo(msrest.serialization.Model):
"""TaskControlFlowInfo.
:ivar control_flow_type: Possible values include: "None", "DoWhile", "ParallelFor".
:vartype control_flow_type: str or ~flow.models.ControlFlowType
:ivar iteration_index:
:vartype iteration_index: int
:ivar item_name:
:vartype item_name: str
:ivar parameters_overwritten: Dictionary of :code:`<string>`.
:vartype parameters_overwritten: dict[str, str]
:ivar is_reused:
:vartype is_reused: bool
"""
_attribute_map = {
'control_flow_type': {'key': 'controlFlowType', 'type': 'str'},
'iteration_index': {'key': 'iterationIndex', 'type': 'int'},
'item_name': {'key': 'itemName', 'type': 'str'},
'parameters_overwritten': {'key': 'parametersOverwritten', 'type': '{str}'},
'is_reused': {'key': 'isReused', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword control_flow_type: Possible values include: "None", "DoWhile", "ParallelFor".
:paramtype control_flow_type: str or ~flow.models.ControlFlowType
:keyword iteration_index:
:paramtype iteration_index: int
:keyword item_name:
:paramtype item_name: str
:keyword parameters_overwritten: Dictionary of :code:`<string>`.
:paramtype parameters_overwritten: dict[str, str]
:keyword is_reused:
:paramtype is_reused: bool
"""
super(TaskControlFlowInfo, self).__init__(**kwargs)
self.control_flow_type = kwargs.get('control_flow_type', None)
self.iteration_index = kwargs.get('iteration_index', None)
self.item_name = kwargs.get('item_name', None)
self.parameters_overwritten = kwargs.get('parameters_overwritten', None)
self.is_reused = kwargs.get('is_reused', None)
class TaskReuseInfo(msrest.serialization.Model):
"""TaskReuseInfo.
:ivar experiment_id:
:vartype experiment_id: str
:ivar pipeline_run_id:
:vartype pipeline_run_id: str
:ivar node_id:
:vartype node_id: str
:ivar request_id:
:vartype request_id: str
:ivar run_id:
:vartype run_id: str
:ivar node_start_time:
:vartype node_start_time: ~datetime.datetime
:ivar node_end_time:
:vartype node_end_time: ~datetime.datetime
"""
_attribute_map = {
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'},
'node_id': {'key': 'nodeId', 'type': 'str'},
'request_id': {'key': 'requestId', 'type': 'str'},
'run_id': {'key': 'runId', 'type': 'str'},
'node_start_time': {'key': 'nodeStartTime', 'type': 'iso-8601'},
'node_end_time': {'key': 'nodeEndTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword experiment_id:
:paramtype experiment_id: str
:keyword pipeline_run_id:
:paramtype pipeline_run_id: str
:keyword node_id:
:paramtype node_id: str
:keyword request_id:
:paramtype request_id: str
:keyword run_id:
:paramtype run_id: str
:keyword node_start_time:
:paramtype node_start_time: ~datetime.datetime
:keyword node_end_time:
:paramtype node_end_time: ~datetime.datetime
"""
super(TaskReuseInfo, self).__init__(**kwargs)
self.experiment_id = kwargs.get('experiment_id', None)
self.pipeline_run_id = kwargs.get('pipeline_run_id', None)
self.node_id = kwargs.get('node_id', None)
self.request_id = kwargs.get('request_id', None)
self.run_id = kwargs.get('run_id', None)
self.node_start_time = kwargs.get('node_start_time', None)
self.node_end_time = kwargs.get('node_end_time', None)
class TensorflowConfiguration(msrest.serialization.Model):
"""TensorflowConfiguration.
:ivar worker_count:
:vartype worker_count: int
:ivar parameter_server_count:
:vartype parameter_server_count: int
"""
_attribute_map = {
'worker_count': {'key': 'workerCount', 'type': 'int'},
'parameter_server_count': {'key': 'parameterServerCount', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword worker_count:
:paramtype worker_count: int
:keyword parameter_server_count:
:paramtype parameter_server_count: int
"""
super(TensorflowConfiguration, self).__init__(**kwargs)
self.worker_count = kwargs.get('worker_count', None)
self.parameter_server_count = kwargs.get('parameter_server_count', None)
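
# Illustrative usage (editor's note, not generated code): a classic TensorFlow
# parameter-server topology; the counts below are hypothetical.
#
#     tf_config = TensorflowConfiguration(worker_count=4, parameter_server_count=1)
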
class TestDataSettings(msrest.serialization.Model):
"""TestDataSettings.
:ivar test_data_size:
:vartype test_data_size: float
"""
_attribute_map = {
'test_data_size': {'key': 'testDataSize', 'type': 'float'},
}
def __init__(
self,
**kwargs
):
"""
:keyword test_data_size:
:paramtype test_data_size: float
"""
super(TestDataSettings, self).__init__(**kwargs)
self.test_data_size = kwargs.get('test_data_size', None)
class Tool(msrest.serialization.Model):
"""Tool.
:ivar name:
:vartype name: str
:ivar type: Possible values include: "llm", "python", "action", "prompt", "custom_llm",
"csharp", "typescript".
:vartype type: str or ~flow.models.ToolType
:ivar inputs: This is a dictionary.
:vartype inputs: dict[str, ~flow.models.InputDefinition]
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, ~flow.models.OutputDefinition]
:ivar description:
:vartype description: str
:ivar connection_type:
:vartype connection_type: list[str or ~flow.models.ConnectionType]
:ivar module:
:vartype module: str
:ivar class_name:
:vartype class_name: str
:ivar source:
:vartype source: str
:ivar lkg_code:
:vartype lkg_code: str
:ivar code:
:vartype code: str
:ivar function:
:vartype function: str
:ivar action_type:
:vartype action_type: str
:ivar provider_config: This is a dictionary.
:vartype provider_config: dict[str, ~flow.models.InputDefinition]
:ivar function_config: This is a dictionary.
:vartype function_config: dict[str, ~flow.models.InputDefinition]
:ivar icon: Anything.
:vartype icon: any
:ivar category:
:vartype category: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, any]
:ivar is_builtin:
:vartype is_builtin: bool
:ivar package:
:vartype package: str
:ivar package_version:
:vartype package_version: str
:ivar default_prompt:
:vartype default_prompt: str
:ivar enable_kwargs:
:vartype enable_kwargs: bool
:ivar deprecated_tools:
:vartype deprecated_tools: list[str]
:ivar tool_state: Possible values include: "Stable", "Preview", "Deprecated".
:vartype tool_state: str or ~flow.models.ToolState
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'inputs': {'key': 'inputs', 'type': '{InputDefinition}'},
'outputs': {'key': 'outputs', 'type': '{OutputDefinition}'},
'description': {'key': 'description', 'type': 'str'},
'connection_type': {'key': 'connection_type', 'type': '[str]'},
'module': {'key': 'module', 'type': 'str'},
'class_name': {'key': 'class_name', 'type': 'str'},
'source': {'key': 'source', 'type': 'str'},
'lkg_code': {'key': 'lkgCode', 'type': 'str'},
'code': {'key': 'code', 'type': 'str'},
'function': {'key': 'function', 'type': 'str'},
'action_type': {'key': 'action_type', 'type': 'str'},
'provider_config': {'key': 'provider_config', 'type': '{InputDefinition}'},
'function_config': {'key': 'function_config', 'type': '{InputDefinition}'},
'icon': {'key': 'icon', 'type': 'object'},
'category': {'key': 'category', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{object}'},
'is_builtin': {'key': 'is_builtin', 'type': 'bool'},
'package': {'key': 'package', 'type': 'str'},
'package_version': {'key': 'package_version', 'type': 'str'},
'default_prompt': {'key': 'default_prompt', 'type': 'str'},
'enable_kwargs': {'key': 'enable_kwargs', 'type': 'bool'},
'deprecated_tools': {'key': 'deprecated_tools', 'type': '[str]'},
'tool_state': {'key': 'tool_state', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type: Possible values include: "llm", "python", "action", "prompt", "custom_llm",
"csharp", "typescript".
:paramtype type: str or ~flow.models.ToolType
:keyword inputs: This is a dictionary.
:paramtype inputs: dict[str, ~flow.models.InputDefinition]
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, ~flow.models.OutputDefinition]
:keyword description:
:paramtype description: str
:keyword connection_type:
:paramtype connection_type: list[str or ~flow.models.ConnectionType]
:keyword module:
:paramtype module: str
:keyword class_name:
:paramtype class_name: str
:keyword source:
:paramtype source: str
:keyword lkg_code:
:paramtype lkg_code: str
:keyword code:
:paramtype code: str
:keyword function:
:paramtype function: str
:keyword action_type:
:paramtype action_type: str
:keyword provider_config: This is a dictionary.
:paramtype provider_config: dict[str, ~flow.models.InputDefinition]
:keyword function_config: This is a dictionary.
:paramtype function_config: dict[str, ~flow.models.InputDefinition]
:keyword icon: Anything.
:paramtype icon: any
:keyword category:
:paramtype category: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, any]
:keyword is_builtin:
:paramtype is_builtin: bool
:keyword package:
:paramtype package: str
:keyword package_version:
:paramtype package_version: str
:keyword default_prompt:
:paramtype default_prompt: str
:keyword enable_kwargs:
:paramtype enable_kwargs: bool
:keyword deprecated_tools:
:paramtype deprecated_tools: list[str]
:keyword tool_state: Possible values include: "Stable", "Preview", "Deprecated".
:paramtype tool_state: str or ~flow.models.ToolState
"""
super(Tool, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = kwargs.get('type', None)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
self.description = kwargs.get('description', None)
self.connection_type = kwargs.get('connection_type', None)
self.module = kwargs.get('module', None)
self.class_name = kwargs.get('class_name', None)
self.source = kwargs.get('source', None)
self.lkg_code = kwargs.get('lkg_code', None)
self.code = kwargs.get('code', None)
self.function = kwargs.get('function', None)
self.action_type = kwargs.get('action_type', None)
self.provider_config = kwargs.get('provider_config', None)
self.function_config = kwargs.get('function_config', None)
self.icon = kwargs.get('icon', None)
self.category = kwargs.get('category', None)
self.tags = kwargs.get('tags', None)
self.is_builtin = kwargs.get('is_builtin', None)
self.package = kwargs.get('package', None)
self.package_version = kwargs.get('package_version', None)
self.default_prompt = kwargs.get('default_prompt', None)
self.enable_kwargs = kwargs.get('enable_kwargs', None)
self.deprecated_tools = kwargs.get('deprecated_tools', None)
self.tool_state = kwargs.get('tool_state', None)
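
# Illustrative usage (editor's note, not generated code): a minimal "python"
# tool; ``inputs``/``outputs`` would normally carry InputDefinition /
# OutputDefinition models. All values below are hypothetical.
#
#     tool = Tool(
#         name="echo",
#         type="python",
#         description="Returns its input unchanged.",
#         function="echo",
#         is_builtin=False,
#         tool_state="Preview",
#     )
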
class ToolFuncResponse(msrest.serialization.Model):
"""ToolFuncResponse.
:ivar result: Anything.
:vartype result: any
:ivar logs: This is a dictionary.
:vartype logs: dict[str, str]
"""
_attribute_map = {
'result': {'key': 'result', 'type': 'object'},
'logs': {'key': 'logs', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword result: Anything.
:paramtype result: any
:keyword logs: This is a dictionary.
:paramtype logs: dict[str, str]
"""
super(ToolFuncResponse, self).__init__(**kwargs)
self.result = kwargs.get('result', None)
self.logs = kwargs.get('logs', None)
class ToolInputDynamicList(msrest.serialization.Model):
"""ToolInputDynamicList.
:ivar func_path:
:vartype func_path: str
:ivar func_kwargs:
:vartype func_kwargs: list[dict[str, any]]
"""
_attribute_map = {
'func_path': {'key': 'func_path', 'type': 'str'},
'func_kwargs': {'key': 'func_kwargs', 'type': '[{object}]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword func_path:
:paramtype func_path: str
:keyword func_kwargs:
:paramtype func_kwargs: list[dict[str, any]]
"""
super(ToolInputDynamicList, self).__init__(**kwargs)
self.func_path = kwargs.get('func_path', None)
self.func_kwargs = kwargs.get('func_kwargs', None)
class ToolInputGeneratedBy(msrest.serialization.Model):
"""ToolInputGeneratedBy.
:ivar func_path:
:vartype func_path: str
:ivar func_kwargs:
:vartype func_kwargs: list[dict[str, any]]
:ivar reverse_func_path:
:vartype reverse_func_path: str
"""
_attribute_map = {
'func_path': {'key': 'func_path', 'type': 'str'},
'func_kwargs': {'key': 'func_kwargs', 'type': '[{object}]'},
'reverse_func_path': {'key': 'reverse_func_path', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword func_path:
:paramtype func_path: str
:keyword func_kwargs:
:paramtype func_kwargs: list[dict[str, any]]
:keyword reverse_func_path:
:paramtype reverse_func_path: str
"""
super(ToolInputGeneratedBy, self).__init__(**kwargs)
self.func_path = kwargs.get('func_path', None)
self.func_kwargs = kwargs.get('func_kwargs', None)
self.reverse_func_path = kwargs.get('reverse_func_path', None)
class ToolMetaDto(msrest.serialization.Model):
"""ToolMetaDto.
:ivar tools: This is a dictionary.
:vartype tools: dict[str, ~flow.models.Tool]
:ivar errors: This is a dictionary.
:vartype errors: dict[str, ~flow.models.ErrorResponse]
"""
_attribute_map = {
'tools': {'key': 'tools', 'type': '{Tool}'},
'errors': {'key': 'errors', 'type': '{ErrorResponse}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword tools: This is a dictionary.
:paramtype tools: dict[str, ~flow.models.Tool]
:keyword errors: This is a dictionary.
:paramtype errors: dict[str, ~flow.models.ErrorResponse]
"""
super(ToolMetaDto, self).__init__(**kwargs)
self.tools = kwargs.get('tools', None)
self.errors = kwargs.get('errors', None)
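
# Illustrative usage (editor's note, not generated code): a sketch of the
# per-tool dictionaries; the key is assumed to be a tool source identifier, and
# all values are hypothetical.
#
#     meta = ToolMetaDto(
#         tools={"my_tools/echo.py": Tool(name="echo", type="python")},
#         errors={},
#     )
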
class ToolSetting(msrest.serialization.Model):
"""ToolSetting.
:ivar providers:
:vartype providers: list[~flow.models.ProviderEntity]
"""
_attribute_map = {
'providers': {'key': 'providers', 'type': '[ProviderEntity]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword providers:
:paramtype providers: list[~flow.models.ProviderEntity]
"""
super(ToolSetting, self).__init__(**kwargs)
self.providers = kwargs.get('providers', None)
class ToolSourceMeta(msrest.serialization.Model):
"""ToolSourceMeta.
:ivar tool_type:
:vartype tool_type: str
"""
_attribute_map = {
'tool_type': {'key': 'tool_type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword tool_type:
:paramtype tool_type: str
"""
super(ToolSourceMeta, self).__init__(**kwargs)
self.tool_type = kwargs.get('tool_type', None)
class TorchDistributedConfiguration(msrest.serialization.Model):
"""TorchDistributedConfiguration.
:ivar process_count_per_node:
:vartype process_count_per_node: int
"""
_attribute_map = {
'process_count_per_node': {'key': 'processCountPerNode', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword process_count_per_node:
:paramtype process_count_per_node: int
"""
super(TorchDistributedConfiguration, self).__init__(**kwargs)
self.process_count_per_node = kwargs.get('process_count_per_node', None)
class TrainingDiagnosticConfiguration(msrest.serialization.Model):
"""TrainingDiagnosticConfiguration.
:ivar job_heart_beat_timeout_seconds:
:vartype job_heart_beat_timeout_seconds: int
"""
_attribute_map = {
'job_heart_beat_timeout_seconds': {'key': 'jobHeartBeatTimeoutSeconds', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword job_heart_beat_timeout_seconds:
:paramtype job_heart_beat_timeout_seconds: int
"""
super(TrainingDiagnosticConfiguration, self).__init__(**kwargs)
self.job_heart_beat_timeout_seconds = kwargs.get('job_heart_beat_timeout_seconds', None)
class TrainingOutput(msrest.serialization.Model):
"""TrainingOutput.
:ivar training_output_type: Possible values include: "Metrics", "Model".
:vartype training_output_type: str or ~flow.models.TrainingOutputType
:ivar iteration:
:vartype iteration: int
:ivar metric:
:vartype metric: str
:ivar model_file:
:vartype model_file: str
"""
_attribute_map = {
'training_output_type': {'key': 'trainingOutputType', 'type': 'str'},
'iteration': {'key': 'iteration', 'type': 'int'},
'metric': {'key': 'metric', 'type': 'str'},
'model_file': {'key': 'modelFile', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword training_output_type: Possible values include: "Metrics", "Model".
:paramtype training_output_type: str or ~flow.models.TrainingOutputType
:keyword iteration:
:paramtype iteration: int
:keyword metric:
:paramtype metric: str
:keyword model_file:
:paramtype model_file: str
"""
super(TrainingOutput, self).__init__(**kwargs)
self.training_output_type = kwargs.get('training_output_type', None)
self.iteration = kwargs.get('iteration', None)
self.metric = kwargs.get('metric', None)
self.model_file = kwargs.get('model_file', None)
class TrainingSettings(msrest.serialization.Model):
"""TrainingSettings.
:ivar block_list_models:
:vartype block_list_models: list[str]
:ivar allow_list_models:
:vartype allow_list_models: list[str]
:ivar enable_dnn_training:
:vartype enable_dnn_training: bool
:ivar enable_onnx_compatible_models:
:vartype enable_onnx_compatible_models: bool
:ivar stack_ensemble_settings:
:vartype stack_ensemble_settings: ~flow.models.StackEnsembleSettings
:ivar enable_stack_ensemble:
:vartype enable_stack_ensemble: bool
:ivar enable_vote_ensemble:
:vartype enable_vote_ensemble: bool
:ivar ensemble_model_download_timeout:
:vartype ensemble_model_download_timeout: str
:ivar enable_model_explainability:
:vartype enable_model_explainability: bool
:ivar training_mode: Possible values include: "Distributed", "NonDistributed", "Auto".
:vartype training_mode: str or ~flow.models.TabularTrainingMode
"""
_attribute_map = {
'block_list_models': {'key': 'blockListModels', 'type': '[str]'},
'allow_list_models': {'key': 'allowListModels', 'type': '[str]'},
'enable_dnn_training': {'key': 'enableDnnTraining', 'type': 'bool'},
'enable_onnx_compatible_models': {'key': 'enableOnnxCompatibleModels', 'type': 'bool'},
'stack_ensemble_settings': {'key': 'stackEnsembleSettings', 'type': 'StackEnsembleSettings'},
'enable_stack_ensemble': {'key': 'enableStackEnsemble', 'type': 'bool'},
'enable_vote_ensemble': {'key': 'enableVoteEnsemble', 'type': 'bool'},
'ensemble_model_download_timeout': {'key': 'ensembleModelDownloadTimeout', 'type': 'str'},
'enable_model_explainability': {'key': 'enableModelExplainability', 'type': 'bool'},
'training_mode': {'key': 'trainingMode', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword block_list_models:
:paramtype block_list_models: list[str]
:keyword allow_list_models:
:paramtype allow_list_models: list[str]
:keyword enable_dnn_training:
:paramtype enable_dnn_training: bool
:keyword enable_onnx_compatible_models:
:paramtype enable_onnx_compatible_models: bool
:keyword stack_ensemble_settings:
:paramtype stack_ensemble_settings: ~flow.models.StackEnsembleSettings
:keyword enable_stack_ensemble:
:paramtype enable_stack_ensemble: bool
:keyword enable_vote_ensemble:
:paramtype enable_vote_ensemble: bool
:keyword ensemble_model_download_timeout:
:paramtype ensemble_model_download_timeout: str
:keyword enable_model_explainability:
:paramtype enable_model_explainability: bool
:keyword training_mode: Possible values include: "Distributed", "NonDistributed", "Auto".
:paramtype training_mode: str or ~flow.models.TabularTrainingMode
"""
super(TrainingSettings, self).__init__(**kwargs)
self.block_list_models = kwargs.get('block_list_models', None)
self.allow_list_models = kwargs.get('allow_list_models', None)
self.enable_dnn_training = kwargs.get('enable_dnn_training', None)
self.enable_onnx_compatible_models = kwargs.get('enable_onnx_compatible_models', None)
self.stack_ensemble_settings = kwargs.get('stack_ensemble_settings', None)
self.enable_stack_ensemble = kwargs.get('enable_stack_ensemble', None)
self.enable_vote_ensemble = kwargs.get('enable_vote_ensemble', None)
self.ensemble_model_download_timeout = kwargs.get('ensemble_model_download_timeout', None)
self.enable_model_explainability = kwargs.get('enable_model_explainability', None)
self.training_mode = kwargs.get('training_mode', None)
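

def _example_training_settings():
    # Illustrative sketch (hypothetical model names): attributes left unset
    # default to None and are omitted from the serialized REST payload.
    settings = TrainingSettings(
        allow_list_models=['LightGBM', 'XGBoostClassifier'],
        enable_stack_ensemble=False,
        enable_vote_ensemble=True,
        training_mode='Auto',  # one of "Distributed", "NonDistributed", "Auto"
    )
    return settings.serialize()
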
class TriggerAsyncOperationStatus(msrest.serialization.Model):
"""TriggerAsyncOperationStatus.
:ivar id:
:vartype id: str
:ivar operation_type: Possible values include: "Create", "Update", "Delete", "CreateOrUpdate".
:vartype operation_type: str or ~flow.models.TriggerOperationType
:ivar provisioning_status: Possible values include: "Creating", "Updating", "Deleting",
"Succeeded", "Failed", "Canceled".
:vartype provisioning_status: str or ~flow.models.ScheduleProvisioningStatus
:ivar created_time:
:vartype created_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
:ivar error: The error response.
:vartype error: ~flow.models.ErrorResponse
:ivar status_code: Possible values include: "Continue", "SwitchingProtocols", "Processing",
"EarlyHints", "OK", "Created", "Accepted", "NonAuthoritativeInformation", "NoContent",
"ResetContent", "PartialContent", "MultiStatus", "AlreadyReported", "IMUsed",
"MultipleChoices", "Ambiguous", "MovedPermanently", "Moved", "Found", "Redirect", "SeeOther",
"RedirectMethod", "NotModified", "UseProxy", "Unused", "TemporaryRedirect", "RedirectKeepVerb",
"PermanentRedirect", "BadRequest", "Unauthorized", "PaymentRequired", "Forbidden", "NotFound",
"MethodNotAllowed", "NotAcceptable", "ProxyAuthenticationRequired", "RequestTimeout",
"Conflict", "Gone", "LengthRequired", "PreconditionFailed", "RequestEntityTooLarge",
"RequestUriTooLong", "UnsupportedMediaType", "RequestedRangeNotSatisfiable",
"ExpectationFailed", "MisdirectedRequest", "UnprocessableEntity", "Locked", "FailedDependency",
"UpgradeRequired", "PreconditionRequired", "TooManyRequests", "RequestHeaderFieldsTooLarge",
"UnavailableForLegalReasons", "InternalServerError", "NotImplemented", "BadGateway",
"ServiceUnavailable", "GatewayTimeout", "HttpVersionNotSupported", "VariantAlsoNegotiates",
"InsufficientStorage", "LoopDetected", "NotExtended", "NetworkAuthenticationRequired".
:vartype status_code: str or ~flow.models.HttpStatusCode
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'operation_type': {'key': 'operationType', 'type': 'str'},
'provisioning_status': {'key': 'provisioningStatus', 'type': 'str'},
'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'error': {'key': 'error', 'type': 'ErrorResponse'},
'status_code': {'key': 'statusCode', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword operation_type: Possible values include: "Create", "Update", "Delete",
"CreateOrUpdate".
:paramtype operation_type: str or ~flow.models.TriggerOperationType
:keyword provisioning_status: Possible values include: "Creating", "Updating", "Deleting",
"Succeeded", "Failed", "Canceled".
:paramtype provisioning_status: str or ~flow.models.ScheduleProvisioningStatus
:keyword created_time:
:paramtype created_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
:keyword error: The error response.
:paramtype error: ~flow.models.ErrorResponse
:keyword status_code: Possible values include: "Continue", "SwitchingProtocols", "Processing",
"EarlyHints", "OK", "Created", "Accepted", "NonAuthoritativeInformation", "NoContent",
"ResetContent", "PartialContent", "MultiStatus", "AlreadyReported", "IMUsed",
"MultipleChoices", "Ambiguous", "MovedPermanently", "Moved", "Found", "Redirect", "SeeOther",
"RedirectMethod", "NotModified", "UseProxy", "Unused", "TemporaryRedirect", "RedirectKeepVerb",
"PermanentRedirect", "BadRequest", "Unauthorized", "PaymentRequired", "Forbidden", "NotFound",
"MethodNotAllowed", "NotAcceptable", "ProxyAuthenticationRequired", "RequestTimeout",
"Conflict", "Gone", "LengthRequired", "PreconditionFailed", "RequestEntityTooLarge",
"RequestUriTooLong", "UnsupportedMediaType", "RequestedRangeNotSatisfiable",
"ExpectationFailed", "MisdirectedRequest", "UnprocessableEntity", "Locked", "FailedDependency",
"UpgradeRequired", "PreconditionRequired", "TooManyRequests", "RequestHeaderFieldsTooLarge",
"UnavailableForLegalReasons", "InternalServerError", "NotImplemented", "BadGateway",
"ServiceUnavailable", "GatewayTimeout", "HttpVersionNotSupported", "VariantAlsoNegotiates",
"InsufficientStorage", "LoopDetected", "NotExtended", "NetworkAuthenticationRequired".
:paramtype status_code: str or ~flow.models.HttpStatusCode
"""
super(TriggerAsyncOperationStatus, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.operation_type = kwargs.get('operation_type', None)
self.provisioning_status = kwargs.get('provisioning_status', None)
self.created_time = kwargs.get('created_time', None)
self.end_time = kwargs.get('end_time', None)
self.error = kwargs.get('error', None)
self.status_code = kwargs.get('status_code', None)
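

def _example_trigger_async_operation_status():
    # Illustrative sketch: deserializes a hypothetical REST response
    # (camelCase keys, iso-8601 timestamps) via the msrest Model.deserialize
    # classmethod; createdTime is parsed into a datetime.datetime.
    payload = {
        'id': '00000000-0000-0000-0000-000000000000',
        'operationType': 'Create',
        'provisioningStatus': 'Succeeded',
        'createdTime': '2024-01-01T00:00:00.000Z',
        'statusCode': 'OK',
    }
    return TriggerAsyncOperationStatus.deserialize(payload)
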
class TuningNodeRunSetting(msrest.serialization.Model):
"""TuningNodeRunSetting.
:ivar simulation_flow:
:vartype simulation_flow: ~flow.models.FlowGraphReference
:ivar simulation_flow_run_setting:
:vartype simulation_flow_run_setting: ~flow.models.FlowRunSettingsBase
:ivar batch_inputs:
:vartype batch_inputs: list[dict[str, any]]
:ivar input_universal_link:
:vartype input_universal_link: str
:ivar data_inputs: This is a dictionary.
:vartype data_inputs: dict[str, str]
:ivar flow_run_output_directory:
:vartype flow_run_output_directory: str
:ivar connection_overrides:
:vartype connection_overrides: list[~flow.models.ConnectionOverrideSetting]
:ivar flow_run_display_name:
:vartype flow_run_display_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar runtime_name:
:vartype runtime_name: str
:ivar batch_data_input:
:vartype batch_data_input: ~flow.models.BatchDataInput
:ivar inputs_mapping: This is a dictionary.
:vartype inputs_mapping: dict[str, str]
:ivar connections: This is a dictionary.
:vartype connections: dict[str, dict[str, str]]
:ivar environment_variables: This is a dictionary.
:vartype environment_variables: dict[str, str]
:ivar output_data_store:
:vartype output_data_store: str
:ivar run_display_name_generation_type: Possible values include: "AutoAppend",
"UserProvidedMacro".
:vartype run_display_name_generation_type: str or ~flow.models.RunDisplayNameGenerationType
:ivar aml_compute_name:
:vartype aml_compute_name: str
:ivar worker_count:
:vartype worker_count: int
:ivar timeout_in_seconds:
:vartype timeout_in_seconds: int
:ivar promptflow_engine_type: Possible values include: "FastEngine", "ScalableEngine".
:vartype promptflow_engine_type: str or ~flow.models.PromptflowEngineType
"""
_attribute_map = {
'simulation_flow': {'key': 'simulationFlow', 'type': 'FlowGraphReference'},
'simulation_flow_run_setting': {'key': 'simulationFlowRunSetting', 'type': 'FlowRunSettingsBase'},
'batch_inputs': {'key': 'batch_inputs', 'type': '[{object}]'},
'input_universal_link': {'key': 'inputUniversalLink', 'type': 'str'},
'data_inputs': {'key': 'dataInputs', 'type': '{str}'},
'flow_run_output_directory': {'key': 'flowRunOutputDirectory', 'type': 'str'},
'connection_overrides': {'key': 'connectionOverrides', 'type': '[ConnectionOverrideSetting]'},
'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'},
'connections': {'key': 'connections', 'type': '{{str}}'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'output_data_store': {'key': 'outputDataStore', 'type': 'str'},
'run_display_name_generation_type': {'key': 'runDisplayNameGenerationType', 'type': 'str'},
'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'},
'worker_count': {'key': 'workerCount', 'type': 'int'},
'timeout_in_seconds': {'key': 'timeoutInSeconds', 'type': 'int'},
'promptflow_engine_type': {'key': 'promptflowEngineType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword simulation_flow:
:paramtype simulation_flow: ~flow.models.FlowGraphReference
:keyword simulation_flow_run_setting:
:paramtype simulation_flow_run_setting: ~flow.models.FlowRunSettingsBase
:keyword batch_inputs:
:paramtype batch_inputs: list[dict[str, any]]
:keyword input_universal_link:
:paramtype input_universal_link: str
:keyword data_inputs: This is a dictionary.
:paramtype data_inputs: dict[str, str]
:keyword flow_run_output_directory:
:paramtype flow_run_output_directory: str
:keyword connection_overrides:
:paramtype connection_overrides: list[~flow.models.ConnectionOverrideSetting]
:keyword flow_run_display_name:
:paramtype flow_run_display_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword runtime_name:
:paramtype runtime_name: str
:keyword batch_data_input:
:paramtype batch_data_input: ~flow.models.BatchDataInput
:keyword inputs_mapping: This is a dictionary.
:paramtype inputs_mapping: dict[str, str]
:keyword connections: This is a dictionary.
:paramtype connections: dict[str, dict[str, str]]
:keyword environment_variables: This is a dictionary.
:paramtype environment_variables: dict[str, str]
:keyword output_data_store:
:paramtype output_data_store: str
:keyword run_display_name_generation_type: Possible values include: "AutoAppend",
"UserProvidedMacro".
:paramtype run_display_name_generation_type: str or ~flow.models.RunDisplayNameGenerationType
:keyword aml_compute_name:
:paramtype aml_compute_name: str
:keyword worker_count:
:paramtype worker_count: int
:keyword timeout_in_seconds:
:paramtype timeout_in_seconds: int
:keyword promptflow_engine_type: Possible values include: "FastEngine", "ScalableEngine".
:paramtype promptflow_engine_type: str or ~flow.models.PromptflowEngineType
"""
super(TuningNodeRunSetting, self).__init__(**kwargs)
self.simulation_flow = kwargs.get('simulation_flow', None)
self.simulation_flow_run_setting = kwargs.get('simulation_flow_run_setting', None)
self.batch_inputs = kwargs.get('batch_inputs', None)
self.input_universal_link = kwargs.get('input_universal_link', None)
self.data_inputs = kwargs.get('data_inputs', None)
self.flow_run_output_directory = kwargs.get('flow_run_output_directory', None)
self.connection_overrides = kwargs.get('connection_overrides', None)
self.flow_run_display_name = kwargs.get('flow_run_display_name', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
self.runtime_name = kwargs.get('runtime_name', None)
self.batch_data_input = kwargs.get('batch_data_input', None)
self.inputs_mapping = kwargs.get('inputs_mapping', None)
self.connections = kwargs.get('connections', None)
self.environment_variables = kwargs.get('environment_variables', None)
self.output_data_store = kwargs.get('output_data_store', None)
self.run_display_name_generation_type = kwargs.get('run_display_name_generation_type', None)
self.aml_compute_name = kwargs.get('aml_compute_name', None)
self.worker_count = kwargs.get('worker_count', None)
self.timeout_in_seconds = kwargs.get('timeout_in_seconds', None)
self.promptflow_engine_type = kwargs.get('promptflow_engine_type', None)
class TuningNodeSetting(msrest.serialization.Model):
"""TuningNodeSetting.
:ivar variant_ids:
:vartype variant_ids: list[str]
:ivar tuning_node_run_settings: This is a dictionary.
:vartype tuning_node_run_settings: dict[str, ~flow.models.TuningNodeRunSetting]
"""
_attribute_map = {
'variant_ids': {'key': 'variantIds', 'type': '[str]'},
'tuning_node_run_settings': {'key': 'tuningNodeRunSettings', 'type': '{TuningNodeRunSetting}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword variant_ids:
:paramtype variant_ids: list[str]
:keyword tuning_node_run_settings: This is a dictionary.
:paramtype tuning_node_run_settings: dict[str, ~flow.models.TuningNodeRunSetting]
"""
super(TuningNodeSetting, self).__init__(**kwargs)
self.variant_ids = kwargs.get('variant_ids', None)
self.tuning_node_run_settings = kwargs.get('tuning_node_run_settings', None)
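

def _example_tuning_node_setting():
    # Illustrative sketch: composes the nested TuningNodeRunSetting model into
    # the per-variant dictionary; variant ids and compute name are made up.
    run_setting = TuningNodeRunSetting(
        aml_compute_name='my-compute',
        worker_count=2,
        timeout_in_seconds=3600,
    )
    return TuningNodeSetting(
        variant_ids=['variant_0', 'variant_1'],
        tuning_node_run_settings={'variant_0': run_setting},
    )
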
class TypedAssetReference(msrest.serialization.Model):
"""TypedAssetReference.
:ivar asset_id:
:vartype asset_id: str
:ivar type:
:vartype type: str
"""
_attribute_map = {
'asset_id': {'key': 'assetId', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword asset_id:
:paramtype asset_id: str
:keyword type:
:paramtype type: str
"""
super(TypedAssetReference, self).__init__(**kwargs)
self.asset_id = kwargs.get('asset_id', None)
self.type = kwargs.get('type', None)
class UIAzureOpenAIDeploymentNameSelector(msrest.serialization.Model):
"""UIAzureOpenAIDeploymentNameSelector.
:ivar capabilities:
:vartype capabilities: ~flow.models.UIAzureOpenAIModelCapabilities
"""
_attribute_map = {
'capabilities': {'key': 'Capabilities', 'type': 'UIAzureOpenAIModelCapabilities'},
}
def __init__(
self,
**kwargs
):
"""
:keyword capabilities:
:paramtype capabilities: ~flow.models.UIAzureOpenAIModelCapabilities
"""
super(UIAzureOpenAIDeploymentNameSelector, self).__init__(**kwargs)
self.capabilities = kwargs.get('capabilities', None)
class UIAzureOpenAIModelCapabilities(msrest.serialization.Model):
"""UIAzureOpenAIModelCapabilities.
:ivar completion:
:vartype completion: bool
:ivar chat_completion:
:vartype chat_completion: bool
:ivar embeddings:
:vartype embeddings: bool
"""
_attribute_map = {
'completion': {'key': 'Completion', 'type': 'bool'},
'chat_completion': {'key': 'ChatCompletion', 'type': 'bool'},
'embeddings': {'key': 'Embeddings', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword completion:
:paramtype completion: bool
:keyword chat_completion:
:paramtype chat_completion: bool
:keyword embeddings:
:paramtype embeddings: bool
"""
super(UIAzureOpenAIModelCapabilities, self).__init__(**kwargs)
self.completion = kwargs.get('completion', None)
self.chat_completion = kwargs.get('chat_completion', None)
self.embeddings = kwargs.get('embeddings', None)
class UIColumnPicker(msrest.serialization.Model):
"""UIColumnPicker.
:ivar column_picker_for:
:vartype column_picker_for: str
:ivar column_selection_categories:
:vartype column_selection_categories: list[str]
:ivar single_column_selection:
:vartype single_column_selection: bool
"""
_attribute_map = {
'column_picker_for': {'key': 'columnPickerFor', 'type': 'str'},
'column_selection_categories': {'key': 'columnSelectionCategories', 'type': '[str]'},
'single_column_selection': {'key': 'singleColumnSelection', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword column_picker_for:
:paramtype column_picker_for: str
:keyword column_selection_categories:
:paramtype column_selection_categories: list[str]
:keyword single_column_selection:
:paramtype single_column_selection: bool
"""
super(UIColumnPicker, self).__init__(**kwargs)
self.column_picker_for = kwargs.get('column_picker_for', None)
self.column_selection_categories = kwargs.get('column_selection_categories', None)
self.single_column_selection = kwargs.get('single_column_selection', None)
class UIComputeSelection(msrest.serialization.Model):
"""UIComputeSelection.
:ivar compute_types:
:vartype compute_types: list[str]
:ivar require_gpu:
:vartype require_gpu: bool
:ivar os_types:
:vartype os_types: list[str]
:ivar support_serverless:
:vartype support_serverless: bool
:ivar compute_run_settings_mapping: Dictionary of
<components·10my8oj·schemas·uicomputeselection·properties·computerunsettingsmapping·additionalproperties>.
:vartype compute_run_settings_mapping: dict[str, list[~flow.models.RunSettingParameter]]
"""
_attribute_map = {
'compute_types': {'key': 'computeTypes', 'type': '[str]'},
'require_gpu': {'key': 'requireGpu', 'type': 'bool'},
'os_types': {'key': 'osTypes', 'type': '[str]'},
'support_serverless': {'key': 'supportServerless', 'type': 'bool'},
'compute_run_settings_mapping': {'key': 'computeRunSettingsMapping', 'type': '{[RunSettingParameter]}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword compute_types:
:paramtype compute_types: list[str]
:keyword require_gpu:
:paramtype require_gpu: bool
:keyword os_types:
:paramtype os_types: list[str]
:keyword support_serverless:
:paramtype support_serverless: bool
:keyword compute_run_settings_mapping: Dictionary of
<components·10my8oj·schemas·uicomputeselection·properties·computerunsettingsmapping·additionalproperties>.
:paramtype compute_run_settings_mapping: dict[str, list[~flow.models.RunSettingParameter]]
"""
super(UIComputeSelection, self).__init__(**kwargs)
self.compute_types = kwargs.get('compute_types', None)
self.require_gpu = kwargs.get('require_gpu', None)
self.os_types = kwargs.get('os_types', None)
self.support_serverless = kwargs.get('support_serverless', None)
self.compute_run_settings_mapping = kwargs.get('compute_run_settings_mapping', None)
class UIHyperparameterConfiguration(msrest.serialization.Model):
"""UIHyperparameterConfiguration.
:ivar model_name_to_hyper_parameter_and_distribution_mapping: Dictionary of
<components·1nrp69t·schemas·uihyperparameterconfiguration·properties·modelnametohyperparameteranddistributionmapping·additionalproperties>.
:vartype model_name_to_hyper_parameter_and_distribution_mapping: dict[str, dict[str,
list[str]]]
:ivar distribution_parameters_mapping: Dictionary of
<components·d9plq4·schemas·uihyperparameterconfiguration·properties·distributionparametersmapping·additionalproperties>.
:vartype distribution_parameters_mapping: dict[str, list[~flow.models.DistributionParameter]]
:ivar json_schema:
:vartype json_schema: str
"""
_attribute_map = {
'model_name_to_hyper_parameter_and_distribution_mapping': {'key': 'modelNameToHyperParameterAndDistributionMapping', 'type': '{{[str]}}'},
'distribution_parameters_mapping': {'key': 'distributionParametersMapping', 'type': '{[DistributionParameter]}'},
'json_schema': {'key': 'jsonSchema', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword model_name_to_hyper_parameter_and_distribution_mapping: Dictionary of
<components·1nrp69t·schemas·uihyperparameterconfiguration·properties·modelnametohyperparameteranddistributionmapping·additionalproperties>.
:paramtype model_name_to_hyper_parameter_and_distribution_mapping: dict[str, dict[str,
list[str]]]
:keyword distribution_parameters_mapping: Dictionary of
<components·d9plq4·schemas·uihyperparameterconfiguration·properties·distributionparametersmapping·additionalproperties>.
:paramtype distribution_parameters_mapping: dict[str, list[~flow.models.DistributionParameter]]
:keyword json_schema:
:paramtype json_schema: str
"""
super(UIHyperparameterConfiguration, self).__init__(**kwargs)
self.model_name_to_hyper_parameter_and_distribution_mapping = kwargs.get('model_name_to_hyper_parameter_and_distribution_mapping', None)
self.distribution_parameters_mapping = kwargs.get('distribution_parameters_mapping', None)
self.json_schema = kwargs.get('json_schema', None)
class UIInputSetting(msrest.serialization.Model):
"""UIInputSetting.
:ivar name:
:vartype name: str
:ivar data_delivery_mode: Possible values include: "Read-only mount", "Read-write mount",
"Download", "Direct", "Evaluate mount", "Evaluate download", "Hdfs".
:vartype data_delivery_mode: str or ~flow.models.UIInputDataDeliveryMode
:ivar path_on_compute:
:vartype path_on_compute: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'data_delivery_mode': {'key': 'dataDeliveryMode', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword data_delivery_mode: Possible values include: "Read-only mount", "Read-write mount",
"Download", "Direct", "Evaluate mount", "Evaluate download", "Hdfs".
:paramtype data_delivery_mode: str or ~flow.models.UIInputDataDeliveryMode
:keyword path_on_compute:
:paramtype path_on_compute: str
"""
super(UIInputSetting, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.data_delivery_mode = kwargs.get('data_delivery_mode', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
class UIJsonEditor(msrest.serialization.Model):
"""UIJsonEditor.
:ivar json_schema:
:vartype json_schema: str
"""
_attribute_map = {
'json_schema': {'key': 'jsonSchema', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword json_schema:
:paramtype json_schema: str
"""
super(UIJsonEditor, self).__init__(**kwargs)
self.json_schema = kwargs.get('json_schema', None)
class UIParameterHint(msrest.serialization.Model):
"""UIParameterHint.
:ivar ui_widget_type: Possible values include: "Default", "Mode", "ColumnPicker", "Credential",
"Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter", "SectionToggle",
"YamlEditor", "EnableRuntimeSweep", "DataStoreSelection", "InstanceTypeSelection",
"ConnectionSelection", "PromptFlowConnectionSelection", "AzureOpenAIDeploymentNameSelection".
:vartype ui_widget_type: str or ~flow.models.UIWidgetTypeEnum
:ivar column_picker:
:vartype column_picker: ~flow.models.UIColumnPicker
:ivar ui_script_language: Possible values include: "None", "Python", "R", "Json", "Sql".
:vartype ui_script_language: str or ~flow.models.UIScriptLanguageEnum
:ivar json_editor:
:vartype json_editor: ~flow.models.UIJsonEditor
:ivar prompt_flow_connection_selector:
:vartype prompt_flow_connection_selector: ~flow.models.UIPromptFlowConnectionSelector
:ivar azure_open_ai_deployment_name_selector:
:vartype azure_open_ai_deployment_name_selector:
~flow.models.UIAzureOpenAIDeploymentNameSelector
:ivar ux_ignore:
:vartype ux_ignore: bool
:ivar anonymous:
:vartype anonymous: bool
"""
_attribute_map = {
'ui_widget_type': {'key': 'uiWidgetType', 'type': 'str'},
'column_picker': {'key': 'columnPicker', 'type': 'UIColumnPicker'},
'ui_script_language': {'key': 'uiScriptLanguage', 'type': 'str'},
'json_editor': {'key': 'jsonEditor', 'type': 'UIJsonEditor'},
'prompt_flow_connection_selector': {'key': 'PromptFlowConnectionSelector', 'type': 'UIPromptFlowConnectionSelector'},
'azure_open_ai_deployment_name_selector': {'key': 'AzureOpenAIDeploymentNameSelector', 'type': 'UIAzureOpenAIDeploymentNameSelector'},
'ux_ignore': {'key': 'UxIgnore', 'type': 'bool'},
'anonymous': {'key': 'Anonymous', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword ui_widget_type: Possible values include: "Default", "Mode", "ColumnPicker",
"Credential", "Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter",
"SectionToggle", "YamlEditor", "EnableRuntimeSweep", "DataStoreSelection",
"InstanceTypeSelection", "ConnectionSelection", "PromptFlowConnectionSelection",
"AzureOpenAIDeploymentNameSelection".
:paramtype ui_widget_type: str or ~flow.models.UIWidgetTypeEnum
:keyword column_picker:
:paramtype column_picker: ~flow.models.UIColumnPicker
:keyword ui_script_language: Possible values include: "None", "Python", "R", "Json", "Sql".
:paramtype ui_script_language: str or ~flow.models.UIScriptLanguageEnum
:keyword json_editor:
:paramtype json_editor: ~flow.models.UIJsonEditor
:keyword prompt_flow_connection_selector:
:paramtype prompt_flow_connection_selector: ~flow.models.UIPromptFlowConnectionSelector
:keyword azure_open_ai_deployment_name_selector:
:paramtype azure_open_ai_deployment_name_selector:
~flow.models.UIAzureOpenAIDeploymentNameSelector
:keyword ux_ignore:
:paramtype ux_ignore: bool
:keyword anonymous:
:paramtype anonymous: bool
"""
super(UIParameterHint, self).__init__(**kwargs)
self.ui_widget_type = kwargs.get('ui_widget_type', None)
self.column_picker = kwargs.get('column_picker', None)
self.ui_script_language = kwargs.get('ui_script_language', None)
self.json_editor = kwargs.get('json_editor', None)
self.prompt_flow_connection_selector = kwargs.get('prompt_flow_connection_selector', None)
self.azure_open_ai_deployment_name_selector = kwargs.get('azure_open_ai_deployment_name_selector', None)
self.ux_ignore = kwargs.get('ux_ignore', None)
self.anonymous = kwargs.get('anonymous', None)
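

def _example_ui_parameter_hint():
    # Illustrative sketch: a parameter hint that renders a single-select
    # column picker widget; the category name is hypothetical.
    picker = UIColumnPicker(
        column_picker_for='dataset',
        column_selection_categories=['Numeric'],
        single_column_selection=True,
    )
    return UIParameterHint(ui_widget_type='ColumnPicker', column_picker=picker)
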
class UIPromptFlowConnectionSelector(msrest.serialization.Model):
"""UIPromptFlowConnectionSelector.
:ivar prompt_flow_connection_type:
:vartype prompt_flow_connection_type: str
"""
_attribute_map = {
'prompt_flow_connection_type': {'key': 'PromptFlowConnectionType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword prompt_flow_connection_type:
:paramtype prompt_flow_connection_type: str
"""
super(UIPromptFlowConnectionSelector, self).__init__(**kwargs)
self.prompt_flow_connection_type = kwargs.get('prompt_flow_connection_type', None)
class UIWidgetMetaInfo(msrest.serialization.Model):
"""UIWidgetMetaInfo.
:ivar module_node_id:
:vartype module_node_id: str
:ivar meta_module_id:
:vartype meta_module_id: str
:ivar parameter_name:
:vartype parameter_name: str
:ivar ui_widget_type: Possible values include: "Default", "Mode", "ColumnPicker", "Credential",
"Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter", "SectionToggle",
"YamlEditor", "EnableRuntimeSweep", "DataStoreSelection", "InstanceTypeSelection",
"ConnectionSelection", "PromptFlowConnectionSelection", "AzureOpenAIDeploymentNameSelection".
:vartype ui_widget_type: str or ~flow.models.UIWidgetTypeEnum
"""
_attribute_map = {
'module_node_id': {'key': 'moduleNodeId', 'type': 'str'},
'meta_module_id': {'key': 'metaModuleId', 'type': 'str'},
'parameter_name': {'key': 'parameterName', 'type': 'str'},
'ui_widget_type': {'key': 'uiWidgetType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword module_node_id:
:paramtype module_node_id: str
:keyword meta_module_id:
:paramtype meta_module_id: str
:keyword parameter_name:
:paramtype parameter_name: str
:keyword ui_widget_type: Possible values include: "Default", "Mode", "ColumnPicker",
"Credential", "Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter",
"SectionToggle", "YamlEditor", "EnableRuntimeSweep", "DataStoreSelection",
"InstanceTypeSelection", "ConnectionSelection", "PromptFlowConnectionSelection",
"AzureOpenAIDeploymentNameSelection".
:paramtype ui_widget_type: str or ~flow.models.UIWidgetTypeEnum
"""
super(UIWidgetMetaInfo, self).__init__(**kwargs)
self.module_node_id = kwargs.get('module_node_id', None)
self.meta_module_id = kwargs.get('meta_module_id', None)
self.parameter_name = kwargs.get('parameter_name', None)
self.ui_widget_type = kwargs.get('ui_widget_type', None)
class UIYamlEditor(msrest.serialization.Model):
"""UIYamlEditor.
:ivar json_schema:
:vartype json_schema: str
"""
_attribute_map = {
'json_schema': {'key': 'jsonSchema', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword json_schema:
:paramtype json_schema: str
"""
super(UIYamlEditor, self).__init__(**kwargs)
self.json_schema = kwargs.get('json_schema', None)
class UnversionedEntityRequestDto(msrest.serialization.Model):
"""UnversionedEntityRequestDto.
:ivar unversioned_entity_ids:
:vartype unversioned_entity_ids: list[str]
"""
_attribute_map = {
'unversioned_entity_ids': {'key': 'unversionedEntityIds', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword unversioned_entity_ids:
:paramtype unversioned_entity_ids: list[str]
"""
super(UnversionedEntityRequestDto, self).__init__(**kwargs)
self.unversioned_entity_ids = kwargs.get('unversioned_entity_ids', None)
class UnversionedEntityResponseDto(msrest.serialization.Model):
"""UnversionedEntityResponseDto.
:ivar unversioned_entities:
:vartype unversioned_entities: list[~flow.models.FlowIndexEntity]
:ivar unversioned_entity_json_schema: Anything.
:vartype unversioned_entity_json_schema: any
:ivar normalized_request_charge:
:vartype normalized_request_charge: float
:ivar normalized_request_charge_period:
:vartype normalized_request_charge_period: str
"""
_attribute_map = {
'unversioned_entities': {'key': 'unversionedEntities', 'type': '[FlowIndexEntity]'},
'unversioned_entity_json_schema': {'key': 'unversionedEntityJsonSchema', 'type': 'object'},
'normalized_request_charge': {'key': 'normalizedRequestCharge', 'type': 'float'},
'normalized_request_charge_period': {'key': 'normalizedRequestChargePeriod', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword unversioned_entities:
:paramtype unversioned_entities: list[~flow.models.FlowIndexEntity]
:keyword unversioned_entity_json_schema: Anything.
:paramtype unversioned_entity_json_schema: any
:keyword normalized_request_charge:
:paramtype normalized_request_charge: float
:keyword normalized_request_charge_period:
:paramtype normalized_request_charge_period: str
"""
super(UnversionedEntityResponseDto, self).__init__(**kwargs)
self.unversioned_entities = kwargs.get('unversioned_entities', None)
self.unversioned_entity_json_schema = kwargs.get('unversioned_entity_json_schema', None)
self.normalized_request_charge = kwargs.get('normalized_request_charge', None)
self.normalized_request_charge_period = kwargs.get('normalized_request_charge_period', None)
class UnversionedRebuildIndexDto(msrest.serialization.Model):
"""UnversionedRebuildIndexDto.
:ivar continuation_token:
:vartype continuation_token: str
:ivar entity_count:
:vartype entity_count: int
:ivar entity_container_type:
:vartype entity_container_type: str
:ivar entity_type:
:vartype entity_type: str
:ivar resource_id:
:vartype resource_id: str
:ivar workspace_id:
:vartype workspace_id: str
:ivar immutable_resource_id:
:vartype immutable_resource_id: str
:ivar start_time:
:vartype start_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
"""
_attribute_map = {
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'entity_count': {'key': 'entityCount', 'type': 'int'},
'entity_container_type': {'key': 'entityContainerType', 'type': 'str'},
'entity_type': {'key': 'entityType', 'type': 'str'},
'resource_id': {'key': 'resourceId', 'type': 'str'},
'workspace_id': {'key': 'workspaceId', 'type': 'str'},
'immutable_resource_id': {'key': 'immutableResourceId', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword continuation_token:
:paramtype continuation_token: str
:keyword entity_count:
:paramtype entity_count: int
:keyword entity_container_type:
:paramtype entity_container_type: str
:keyword entity_type:
:paramtype entity_type: str
:keyword resource_id:
:paramtype resource_id: str
:keyword workspace_id:
:paramtype workspace_id: str
:keyword immutable_resource_id:
:paramtype immutable_resource_id: str
:keyword start_time:
:paramtype start_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
"""
super(UnversionedRebuildIndexDto, self).__init__(**kwargs)
self.continuation_token = kwargs.get('continuation_token', None)
self.entity_count = kwargs.get('entity_count', None)
self.entity_container_type = kwargs.get('entity_container_type', None)
self.entity_type = kwargs.get('entity_type', None)
self.resource_id = kwargs.get('resource_id', None)
self.workspace_id = kwargs.get('workspace_id', None)
self.immutable_resource_id = kwargs.get('immutable_resource_id', None)
self.start_time = kwargs.get('start_time', None)
self.end_time = kwargs.get('end_time', None)
class UnversionedRebuildResponseDto(msrest.serialization.Model):
"""UnversionedRebuildResponseDto.
:ivar entities:
:vartype entities: ~flow.models.SegmentedResult1
:ivar unversioned_entity_schema: Anything.
:vartype unversioned_entity_schema: any
:ivar normalized_request_charge:
:vartype normalized_request_charge: float
:ivar normalized_request_charge_period:
:vartype normalized_request_charge_period: str
"""
_attribute_map = {
'entities': {'key': 'entities', 'type': 'SegmentedResult1'},
'unversioned_entity_schema': {'key': 'unversionedEntitySchema', 'type': 'object'},
'normalized_request_charge': {'key': 'normalizedRequestCharge', 'type': 'float'},
'normalized_request_charge_period': {'key': 'normalizedRequestChargePeriod', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword entities:
:paramtype entities: ~flow.models.SegmentedResult1
:keyword unversioned_entity_schema: Anything.
:paramtype unversioned_entity_schema: any
:keyword normalized_request_charge:
:paramtype normalized_request_charge: float
:keyword normalized_request_charge_period:
:paramtype normalized_request_charge_period: str
"""
super(UnversionedRebuildResponseDto, self).__init__(**kwargs)
self.entities = kwargs.get('entities', None)
self.unversioned_entity_schema = kwargs.get('unversioned_entity_schema', None)
self.normalized_request_charge = kwargs.get('normalized_request_charge', None)
self.normalized_request_charge_period = kwargs.get('normalized_request_charge_period', None)
class UpdateComponentRequest(msrest.serialization.Model):
"""UpdateComponentRequest.
:ivar display_name:
:vartype display_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar module_update_operation_type: Possible values include: "SetDefaultVersion",
"EnableModule", "DisableModule", "UpdateDisplayName", "UpdateDescription", "UpdateTags".
:vartype module_update_operation_type: str or ~flow.models.ModuleUpdateOperationType
:ivar module_version:
:vartype module_version: str
"""
_attribute_map = {
'display_name': {'key': 'displayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'module_update_operation_type': {'key': 'moduleUpdateOperationType', 'type': 'str'},
'module_version': {'key': 'moduleVersion', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword display_name:
:paramtype display_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword module_update_operation_type: Possible values include: "SetDefaultVersion",
"EnableModule", "DisableModule", "UpdateDisplayName", "UpdateDescription", "UpdateTags".
:paramtype module_update_operation_type: str or ~flow.models.ModuleUpdateOperationType
:keyword module_version:
:paramtype module_version: str
"""
super(UpdateComponentRequest, self).__init__(**kwargs)
self.display_name = kwargs.get('display_name', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.module_update_operation_type = kwargs.get('module_update_operation_type', None)
self.module_version = kwargs.get('module_version', None)
class UpdateFlowRequest(msrest.serialization.Model):
"""UpdateFlowRequest.
:ivar flow_run_result:
:vartype flow_run_result: ~flow.models.FlowRunResult
:ivar flow_test_mode: Possible values include: "Sync", "Async".
:vartype flow_test_mode: str or ~flow.models.FlowTestMode
:ivar flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
:vartype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
:ivar flow_name:
:vartype flow_name: str
:ivar description:
:vartype description: str
:ivar details:
:vartype details: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar flow:
:vartype flow: ~flow.models.Flow
:ivar flow_definition_file_path:
:vartype flow_definition_file_path: str
:ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType
:ivar flow_run_settings:
:vartype flow_run_settings: ~flow.models.FlowRunSettings
:ivar is_archived:
:vartype is_archived: bool
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar identity:
:vartype identity: str
"""
_attribute_map = {
'flow_run_result': {'key': 'flowRunResult', 'type': 'FlowRunResult'},
'flow_test_mode': {'key': 'flowTestMode', 'type': 'str'},
'flow_test_infos': {'key': 'flowTestInfos', 'type': '{FlowTestInfo}'},
'flow_name': {'key': 'flowName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'details': {'key': 'details', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'flow': {'key': 'flow', 'type': 'Flow'},
'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
'flow_type': {'key': 'flowType', 'type': 'str'},
'flow_run_settings': {'key': 'flowRunSettings', 'type': 'FlowRunSettings'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'identity': {'key': 'identity', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_run_result:
:paramtype flow_run_result: ~flow.models.FlowRunResult
:keyword flow_test_mode: Possible values include: "Sync", "Async".
:paramtype flow_test_mode: str or ~flow.models.FlowTestMode
:keyword flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
:paramtype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
:keyword flow_name:
:paramtype flow_name: str
:keyword description:
:paramtype description: str
:keyword details:
:paramtype details: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword flow:
:paramtype flow: ~flow.models.Flow
:keyword flow_definition_file_path:
:paramtype flow_definition_file_path: str
:keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:paramtype flow_type: str or ~flow.models.FlowType
:keyword flow_run_settings:
:paramtype flow_run_settings: ~flow.models.FlowRunSettings
:keyword is_archived:
:paramtype is_archived: bool
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword identity:
:paramtype identity: str
"""
super(UpdateFlowRequest, self).__init__(**kwargs)
self.flow_run_result = kwargs.get('flow_run_result', None)
self.flow_test_mode = kwargs.get('flow_test_mode', None)
self.flow_test_infos = kwargs.get('flow_test_infos', None)
self.flow_name = kwargs.get('flow_name', None)
self.description = kwargs.get('description', None)
self.details = kwargs.get('details', None)
self.tags = kwargs.get('tags', None)
self.flow = kwargs.get('flow', None)
self.flow_definition_file_path = kwargs.get('flow_definition_file_path', None)
self.flow_type = kwargs.get('flow_type', None)
self.flow_run_settings = kwargs.get('flow_run_settings', None)
self.is_archived = kwargs.get('is_archived', None)
self.vm_size = kwargs.get('vm_size', None)
self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
self.identity = kwargs.get('identity', None)
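

def _example_update_flow_request():
    # Illustrative sketch with hypothetical values: serialize() maps e.g.
    # flow_name -> 'flowName' and is_archived -> 'isArchived' per
    # _attribute_map, so the returned dict is the REST request body.
    request = UpdateFlowRequest(
        flow_name='my-flow',
        description='Updated via the REST API',
        is_archived=True,
    )
    return request.serialize()
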
class UpdateFlowRuntimeRequest(msrest.serialization.Model):
"""UpdateFlowRuntimeRequest.
:ivar runtime_description:
:vartype runtime_description: str
:ivar environment:
:vartype environment: str
:ivar instance_count:
:vartype instance_count: int
"""
_attribute_map = {
'runtime_description': {'key': 'runtimeDescription', 'type': 'str'},
'environment': {'key': 'environment', 'type': 'str'},
'instance_count': {'key': 'instanceCount', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword runtime_description:
:paramtype runtime_description: str
:keyword environment:
:paramtype environment: str
:keyword instance_count:
:paramtype instance_count: int
"""
super(UpdateFlowRuntimeRequest, self).__init__(**kwargs)
self.runtime_description = kwargs.get('runtime_description', None)
self.environment = kwargs.get('environment', None)
self.instance_count = kwargs.get('instance_count', None)
class UpdateFlowStatusRequest(msrest.serialization.Model):
"""UpdateFlowStatusRequest.
:ivar flow_run_status: Possible values include: "Started", "Completed", "Failed", "Cancelled",
"NotStarted", "Running", "Queued", "Paused", "Unapproved", "Starting", "Preparing",
"CancelRequested", "Pausing", "Finalizing", "Canceled", "Bypassed".
:vartype flow_run_status: str or ~flow.models.FlowRunStatusEnum
:ivar error_response: The error response.
:vartype error_response: ~flow.models.ErrorResponse
"""
_attribute_map = {
'flow_run_status': {'key': 'flowRunStatus', 'type': 'str'},
'error_response': {'key': 'errorResponse', 'type': 'ErrorResponse'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_run_status: Possible values include: "Started", "Completed", "Failed",
"Cancelled", "NotStarted", "Running", "Queued", "Paused", "Unapproved", "Starting",
"Preparing", "CancelRequested", "Pausing", "Finalizing", "Canceled", "Bypassed".
:paramtype flow_run_status: str or ~flow.models.FlowRunStatusEnum
:keyword error_response: The error response.
:paramtype error_response: ~flow.models.ErrorResponse
"""
super(UpdateFlowStatusRequest, self).__init__(**kwargs)
self.flow_run_status = kwargs.get('flow_run_status', None)
self.error_response = kwargs.get('error_response', None)
class UpdateRegistryComponentRequest(msrest.serialization.Model):
"""UpdateRegistryComponentRequest.
:ivar registry_name:
:vartype registry_name: str
:ivar component_name:
:vartype component_name: str
:ivar component_version:
:vartype component_version: str
:ivar update_type: The only acceptable values to pass in are None and "SetDefaultVersion". The
default value is None.
:vartype update_type: str
"""
_attribute_map = {
'registry_name': {'key': 'registryName', 'type': 'str'},
'component_name': {'key': 'componentName', 'type': 'str'},
'component_version': {'key': 'componentVersion', 'type': 'str'},
'update_type': {'key': 'updateType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword registry_name:
:paramtype registry_name: str
:keyword component_name:
:paramtype component_name: str
:keyword component_version:
:paramtype component_version: str
:keyword update_type: The only acceptable values to pass in are None and "SetDefaultVersion".
The default value is None.
:paramtype update_type: str
"""
super(UpdateRegistryComponentRequest, self).__init__(**kwargs)
self.registry_name = kwargs.get('registry_name', None)
self.component_name = kwargs.get('component_name', None)
self.component_version = kwargs.get('component_version', None)
self.update_type = kwargs.get('update_type', None)
class UploadOptions(msrest.serialization.Model):
"""UploadOptions.
:ivar overwrite:
:vartype overwrite: bool
:ivar source_globs:
:vartype source_globs: ~flow.models.ExecutionGlobsOptions
"""
_attribute_map = {
'overwrite': {'key': 'overwrite', 'type': 'bool'},
'source_globs': {'key': 'sourceGlobs', 'type': 'ExecutionGlobsOptions'},
}
def __init__(
self,
**kwargs
):
"""
:keyword overwrite:
:paramtype overwrite: bool
:keyword source_globs:
:paramtype source_globs: ~flow.models.ExecutionGlobsOptions
"""
super(UploadOptions, self).__init__(**kwargs)
self.overwrite = kwargs.get('overwrite', None)
self.source_globs = kwargs.get('source_globs', None)
class UriReference(msrest.serialization.Model):
"""UriReference.
:ivar path:
:vartype path: str
:ivar is_file:
:vartype is_file: bool
"""
_attribute_map = {
'path': {'key': 'path', 'type': 'str'},
'is_file': {'key': 'isFile', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword path:
:paramtype path: str
:keyword is_file:
:paramtype is_file: bool
"""
super(UriReference, self).__init__(**kwargs)
self.path = kwargs.get('path', None)
self.is_file = kwargs.get('is_file', None)
class User(msrest.serialization.Model):
"""User.
:ivar user_object_id: A user or service principal's object ID.
This is EUPI and may only be logged to warm path telemetry.
:vartype user_object_id: str
:ivar user_pu_id: A user or service principal's PuID.
This is PII and should never be logged.
:vartype user_pu_id: str
    :ivar user_idp: A user identity provider, e.g. live.com.
This is PII and should never be logged.
:vartype user_idp: str
    :ivar user_alt_sec_id: A user alternate sec id. This represents the user in a different
     identity provider system, e.g. 1:live.com:puid.
This is PII and should never be logged.
:vartype user_alt_sec_id: str
    :ivar user_iss: The issuer which issued the token for this user.
This is PII and should never be logged.
:vartype user_iss: str
:ivar user_tenant_id: A user or service principal's tenant ID.
:vartype user_tenant_id: str
:ivar user_name: A user's full name or a service principal's app ID.
This is PII and should never be logged.
:vartype user_name: str
    :ivar upn: A user's principal name (UPN).
     This is PII and should never be logged.
:vartype upn: str
"""
_attribute_map = {
'user_object_id': {'key': 'userObjectId', 'type': 'str'},
'user_pu_id': {'key': 'userPuId', 'type': 'str'},
'user_idp': {'key': 'userIdp', 'type': 'str'},
'user_alt_sec_id': {'key': 'userAltSecId', 'type': 'str'},
'user_iss': {'key': 'userIss', 'type': 'str'},
'user_tenant_id': {'key': 'userTenantId', 'type': 'str'},
'user_name': {'key': 'userName', 'type': 'str'},
'upn': {'key': 'upn', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword user_object_id: A user or service principal's object ID.
This is EUPI and may only be logged to warm path telemetry.
:paramtype user_object_id: str
:keyword user_pu_id: A user or service principal's PuID.
This is PII and should never be logged.
:paramtype user_pu_id: str
        :keyword user_idp: A user identity provider, e.g. live.com.
This is PII and should never be logged.
:paramtype user_idp: str
        :keyword user_alt_sec_id: A user alternate sec id. This represents the user in a different
         identity provider system, e.g. 1:live.com:puid.
         This is PII and should never be logged.
:paramtype user_alt_sec_id: str
        :keyword user_iss: The issuer which issued the token for this user.
This is PII and should never be logged.
:paramtype user_iss: str
:keyword user_tenant_id: A user or service principal's tenant ID.
:paramtype user_tenant_id: str
:keyword user_name: A user's full name or a service principal's app ID.
This is PII and should never be logged.
:paramtype user_name: str
        :keyword upn: A user's principal name (UPN).
         This is PII and should never be logged.
:paramtype upn: str
"""
super(User, self).__init__(**kwargs)
self.user_object_id = kwargs.get('user_object_id', None)
self.user_pu_id = kwargs.get('user_pu_id', None)
self.user_idp = kwargs.get('user_idp', None)
self.user_alt_sec_id = kwargs.get('user_alt_sec_id', None)
self.user_iss = kwargs.get('user_iss', None)
self.user_tenant_id = kwargs.get('user_tenant_id', None)
self.user_name = kwargs.get('user_name', None)
self.upn = kwargs.get('upn', None)
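

def _example_user_safe_telemetry():
    # Illustrative sketch: most User fields are PII per the docstrings above,
    # so a telemetry helper should emit only the non-PII identifiers; the ids
    # below are made up.
    user = User(
        user_object_id='11111111-1111-1111-1111-111111111111',
        user_tenant_id='22222222-2222-2222-2222-222222222222',
        user_name='Example Name',  # PII: never log this field
    )
    return {
        'userObjectId': user.user_object_id,  # EUPI: warm-path telemetry only
        'userTenantId': user.user_tenant_id,
    }
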
class UserAssignedIdentity(msrest.serialization.Model):
"""UserAssignedIdentity.
:ivar principal_id:
:vartype principal_id: str
:ivar client_id:
:vartype client_id: str
"""
_attribute_map = {
'principal_id': {'key': 'principalId', 'type': 'str'},
'client_id': {'key': 'clientId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword principal_id:
:paramtype principal_id: str
:keyword client_id:
:paramtype client_id: str
"""
super(UserAssignedIdentity, self).__init__(**kwargs)
self.principal_id = kwargs.get('principal_id', None)
self.client_id = kwargs.get('client_id', None)
class ValidationDataSettings(msrest.serialization.Model):
"""ValidationDataSettings.
:ivar n_cross_validations:
:vartype n_cross_validations: ~flow.models.NCrossValidations
:ivar validation_data_size:
:vartype validation_data_size: float
:ivar cv_split_column_names:
:vartype cv_split_column_names: list[str]
:ivar validation_type:
:vartype validation_type: str
"""
_attribute_map = {
'n_cross_validations': {'key': 'nCrossValidations', 'type': 'NCrossValidations'},
'validation_data_size': {'key': 'validationDataSize', 'type': 'float'},
'cv_split_column_names': {'key': 'cvSplitColumnNames', 'type': '[str]'},
'validation_type': {'key': 'validationType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword n_cross_validations:
:paramtype n_cross_validations: ~flow.models.NCrossValidations
:keyword validation_data_size:
:paramtype validation_data_size: float
:keyword cv_split_column_names:
:paramtype cv_split_column_names: list[str]
:keyword validation_type:
:paramtype validation_type: str
"""
super(ValidationDataSettings, self).__init__(**kwargs)
self.n_cross_validations = kwargs.get('n_cross_validations', None)
self.validation_data_size = kwargs.get('validation_data_size', None)
self.cv_split_column_names = kwargs.get('cv_split_column_names', None)
self.validation_type = kwargs.get('validation_type', None)
class VariantIdentifier(msrest.serialization.Model):
"""VariantIdentifier.
:ivar variant_id:
:vartype variant_id: str
:ivar tuning_node_name:
:vartype tuning_node_name: str
"""
_attribute_map = {
'variant_id': {'key': 'variantId', 'type': 'str'},
'tuning_node_name': {'key': 'tuningNodeName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword variant_id:
:paramtype variant_id: str
:keyword tuning_node_name:
:paramtype tuning_node_name: str
"""
super(VariantIdentifier, self).__init__(**kwargs)
self.variant_id = kwargs.get('variant_id', None)
self.tuning_node_name = kwargs.get('tuning_node_name', None)
class VariantNode(msrest.serialization.Model):
"""VariantNode.
:ivar node:
:vartype node: ~flow.models.Node
:ivar description:
:vartype description: str
"""
_attribute_map = {
'node': {'key': 'node', 'type': 'Node'},
'description': {'key': 'description', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword node:
:paramtype node: ~flow.models.Node
:keyword description:
:paramtype description: str
"""
super(VariantNode, self).__init__(**kwargs)
self.node = kwargs.get('node', None)
self.description = kwargs.get('description', None)
class Volume(msrest.serialization.Model):
"""Volume.
:ivar type:
:vartype type: str
:ivar source:
:vartype source: str
:ivar target:
:vartype target: str
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'source': {'key': 'source', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type:
:paramtype type: str
:keyword source:
:paramtype source: str
:keyword target:
:paramtype target: str
"""
super(Volume, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.source = kwargs.get('source', None)
self.target = kwargs.get('target', None)
class Webhook(msrest.serialization.Model):
"""Webhook.
:ivar webhook_type: The only acceptable values to pass in are None and "AzureDevOps". The
default value is None.
:vartype webhook_type: str
:ivar event_type:
:vartype event_type: str
"""
_attribute_map = {
'webhook_type': {'key': 'webhookType', 'type': 'str'},
'event_type': {'key': 'eventType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword webhook_type: The only acceptable values to pass in are None and "AzureDevOps". The
default value is None.
:paramtype webhook_type: str
:keyword event_type:
:paramtype event_type: str
"""
super(Webhook, self).__init__(**kwargs)
self.webhook_type = kwargs.get('webhook_type', None)
self.event_type = kwargs.get('event_type', None)
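

def _example_webhook():
    # Illustrative sketch: per the docstring, webhook_type accepts only None
    # or "AzureDevOps"; the event type string is hypothetical.
    return Webhook(webhook_type='AzureDevOps', event_type='PullRequestCreated')
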
class WebServiceComputeMetaInfo(msrest.serialization.Model):
"""WebServiceComputeMetaInfo.
:ivar node_count:
:vartype node_count: int
:ivar is_ssl_enabled:
:vartype is_ssl_enabled: bool
:ivar aks_not_found:
:vartype aks_not_found: bool
:ivar cluster_purpose:
:vartype cluster_purpose: str
:ivar public_ip_address:
:vartype public_ip_address: str
:ivar vm_size:
:vartype vm_size: str
:ivar location:
:vartype location: str
:ivar provisioning_state:
:vartype provisioning_state: str
:ivar state:
:vartype state: str
:ivar os_type:
:vartype os_type: str
:ivar id:
:vartype id: str
:ivar name:
:vartype name: str
:ivar created_by_studio:
:vartype created_by_studio: bool
:ivar is_gpu_type:
:vartype is_gpu_type: bool
:ivar resource_id:
:vartype resource_id: str
:ivar compute_type:
:vartype compute_type: str
"""
_attribute_map = {
'node_count': {'key': 'nodeCount', 'type': 'int'},
'is_ssl_enabled': {'key': 'isSslEnabled', 'type': 'bool'},
'aks_not_found': {'key': 'aksNotFound', 'type': 'bool'},
'cluster_purpose': {'key': 'clusterPurpose', 'type': 'str'},
'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'state': {'key': 'state', 'type': 'str'},
'os_type': {'key': 'osType', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'created_by_studio': {'key': 'createdByStudio', 'type': 'bool'},
'is_gpu_type': {'key': 'isGpuType', 'type': 'bool'},
'resource_id': {'key': 'resourceId', 'type': 'str'},
'compute_type': {'key': 'computeType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword node_count:
:paramtype node_count: int
:keyword is_ssl_enabled:
:paramtype is_ssl_enabled: bool
:keyword aks_not_found:
:paramtype aks_not_found: bool
:keyword cluster_purpose:
:paramtype cluster_purpose: str
:keyword public_ip_address:
:paramtype public_ip_address: str
:keyword vm_size:
:paramtype vm_size: str
:keyword location:
:paramtype location: str
:keyword provisioning_state:
:paramtype provisioning_state: str
:keyword state:
:paramtype state: str
:keyword os_type:
:paramtype os_type: str
:keyword id:
:paramtype id: str
:keyword name:
:paramtype name: str
:keyword created_by_studio:
:paramtype created_by_studio: bool
:keyword is_gpu_type:
:paramtype is_gpu_type: bool
:keyword resource_id:
:paramtype resource_id: str
:keyword compute_type:
:paramtype compute_type: str
"""
super(WebServiceComputeMetaInfo, self).__init__(**kwargs)
self.node_count = kwargs.get('node_count', None)
self.is_ssl_enabled = kwargs.get('is_ssl_enabled', None)
self.aks_not_found = kwargs.get('aks_not_found', None)
self.cluster_purpose = kwargs.get('cluster_purpose', None)
self.public_ip_address = kwargs.get('public_ip_address', None)
self.vm_size = kwargs.get('vm_size', None)
self.location = kwargs.get('location', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
self.state = kwargs.get('state', None)
self.os_type = kwargs.get('os_type', None)
self.id = kwargs.get('id', None)
self.name = kwargs.get('name', None)
self.created_by_studio = kwargs.get('created_by_studio', None)
self.is_gpu_type = kwargs.get('is_gpu_type', None)
self.resource_id = kwargs.get('resource_id', None)
self.compute_type = kwargs.get('compute_type', None)
class WebServicePort(msrest.serialization.Model):
"""WebServicePort.
:ivar node_id:
:vartype node_id: str
:ivar port_name:
:vartype port_name: str
:ivar name:
:vartype name: str
"""
_attribute_map = {
'node_id': {'key': 'nodeId', 'type': 'str'},
'port_name': {'key': 'portName', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword node_id:
:paramtype node_id: str
:keyword port_name:
:paramtype port_name: str
:keyword name:
:paramtype name: str
"""
super(WebServicePort, self).__init__(**kwargs)
self.node_id = kwargs.get('node_id', None)
self.port_name = kwargs.get('port_name', None)
self.name = kwargs.get('name', None)
class WorkspaceConnectionSpec(msrest.serialization.Model):
"""WorkspaceConnectionSpec.
:ivar connection_category: Possible values include: "PythonFeed", "ACR", "Git", "S3",
"Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb",
"AzureDataLakeGen2", "Redis", "ApiKey", "AzureOpenAI", "CognitiveSearch", "CognitiveService",
"CustomKeys", "AzureBlob", "AzureOneLake", "CosmosDb", "CosmosDbMongoDbApi",
"AzureDataExplorer", "AzureMariaDb", "AzureDatabricksDeltaLake", "AzureSqlMi",
"AzureTableStorage", "AmazonRdsForOracle", "AmazonRdsForSqlServer", "AmazonRedshift", "Db2",
"Drill", "GoogleBigQuery", "Greenplum", "Hbase", "Hive", "Impala", "Informix", "MariaDb",
"MicrosoftAccess", "MySql", "Netezza", "Oracle", "Phoenix", "PostgreSql", "Presto",
"SapOpenHub", "SapBw", "SapHana", "SapTable", "Spark", "SqlServer", "Sybase", "Teradata",
"Vertica", "Cassandra", "Couchbase", "MongoDbV2", "MongoDbAtlas", "AmazonS3Compatible",
"FileServer", "FtpServer", "GoogleCloudStorage", "Hdfs", "OracleCloudStorage", "Sftp",
"GenericHttp", "ODataRest", "Odbc", "GenericRest", "AmazonMws", "Concur", "Dynamics",
"DynamicsAx", "DynamicsCrm", "GoogleAdWords", "Hubspot", "Jira", "Magento", "Marketo",
"Office365", "Eloqua", "Responsys", "OracleServiceCloud", "PayPal", "QuickBooks", "Salesforce",
"SalesforceServiceCloud", "SalesforceMarketingCloud", "SapCloudForCustomer", "SapEcc",
"ServiceNow", "SharePointOnlineList", "Shopify", "Square", "WebTable", "Xero", "Zoho",
"GenericContainerRegistry".
:vartype connection_category: str or ~flow.models.ConnectionCategory
:ivar flow_value_type: Possible values include: "int", "double", "bool", "string", "secret",
"prompt_template", "object", "list", "BingConnection", "OpenAIConnection",
"AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection",
"AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection",
"SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection",
"function_list", "function_str", "FormRecognizerConnection", "file_path", "image",
"assistant_definition".
:vartype flow_value_type: str or ~flow.models.ValueType
:ivar connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:vartype connection_type: str or ~flow.models.ConnectionType
:ivar connection_type_display_name:
:vartype connection_type_display_name: str
:ivar config_specs:
:vartype config_specs: list[~flow.models.ConnectionConfigSpec]
:ivar module:
:vartype module: str
"""
_attribute_map = {
'connection_category': {'key': 'connectionCategory', 'type': 'str'},
'flow_value_type': {'key': 'flowValueType', 'type': 'str'},
'connection_type': {'key': 'connectionType', 'type': 'str'},
'connection_type_display_name': {'key': 'connectionTypeDisplayName', 'type': 'str'},
'config_specs': {'key': 'configSpecs', 'type': '[ConnectionConfigSpec]'},
'module': {'key': 'module', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword connection_category: Possible values include: "PythonFeed", "ACR", "Git", "S3",
"Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb",
"AzureDataLakeGen2", "Redis", "ApiKey", "AzureOpenAI", "CognitiveSearch", "CognitiveService",
"CustomKeys", "AzureBlob", "AzureOneLake", "CosmosDb", "CosmosDbMongoDbApi",
"AzureDataExplorer", "AzureMariaDb", "AzureDatabricksDeltaLake", "AzureSqlMi",
"AzureTableStorage", "AmazonRdsForOracle", "AmazonRdsForSqlServer", "AmazonRedshift", "Db2",
"Drill", "GoogleBigQuery", "Greenplum", "Hbase", "Hive", "Impala", "Informix", "MariaDb",
"MicrosoftAccess", "MySql", "Netezza", "Oracle", "Phoenix", "PostgreSql", "Presto",
"SapOpenHub", "SapBw", "SapHana", "SapTable", "Spark", "SqlServer", "Sybase", "Teradata",
"Vertica", "Cassandra", "Couchbase", "MongoDbV2", "MongoDbAtlas", "AmazonS3Compatible",
"FileServer", "FtpServer", "GoogleCloudStorage", "Hdfs", "OracleCloudStorage", "Sftp",
"GenericHttp", "ODataRest", "Odbc", "GenericRest", "AmazonMws", "Concur", "Dynamics",
"DynamicsAx", "DynamicsCrm", "GoogleAdWords", "Hubspot", "Jira", "Magento", "Marketo",
"Office365", "Eloqua", "Responsys", "OracleServiceCloud", "PayPal", "QuickBooks", "Salesforce",
"SalesforceServiceCloud", "SalesforceMarketingCloud", "SapCloudForCustomer", "SapEcc",
"ServiceNow", "SharePointOnlineList", "Shopify", "Square", "WebTable", "Xero", "Zoho",
"GenericContainerRegistry".
:paramtype connection_category: str or ~flow.models.ConnectionCategory
:keyword flow_value_type: Possible values include: "int", "double", "bool", "string", "secret",
"prompt_template", "object", "list", "BingConnection", "OpenAIConnection",
"AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection",
"AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection",
"SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection",
"function_list", "function_str", "FormRecognizerConnection", "file_path", "image",
"assistant_definition".
:paramtype flow_value_type: str or ~flow.models.ValueType
:keyword connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:paramtype connection_type: str or ~flow.models.ConnectionType
:keyword connection_type_display_name:
:paramtype connection_type_display_name: str
:keyword config_specs:
:paramtype config_specs: list[~flow.models.ConnectionConfigSpec]
:keyword module:
:paramtype module: str
"""
super(WorkspaceConnectionSpec, self).__init__(**kwargs)
self.connection_category = kwargs.get('connection_category', None)
self.flow_value_type = kwargs.get('flow_value_type', None)
self.connection_type = kwargs.get('connection_type', None)
self.connection_type_display_name = kwargs.get('connection_type_display_name', None)
self.config_specs = kwargs.get('config_specs', None)
self.module = kwargs.get('module', None)
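# Illustrative sketch (not part of the generated client): these AutoRest models
# are constructed with keyword arguments, and _attribute_map maps the snake_case
# attributes to the camelCase wire format. The values below are assumptions for
# demonstration only.
#
#     spec = WorkspaceConnectionSpec(
#         connection_category="AzureOpenAI",
#         flow_value_type="AzureOpenAIConnection",
#         connection_type="AzureOpenAI",
#         connection_type_display_name="Azure OpenAI",
#     )
#     assert spec.module is None  # unset keywords default to None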
# --- promptflow/src/promptflow/promptflow/azure/_restclient/flow/models/_models.py ---
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
"""service_caller.py, module for interacting with the AzureML service."""
import json
import os
import sys
import time
import uuid
from functools import wraps, cached_property
import pydash
from azure.core.exceptions import HttpResponseError, ResourceExistsError
from azure.core.pipeline.policies import RetryPolicy
from promptflow._sdk._telemetry import request_id_context
from promptflow._sdk._telemetry import TelemetryMixin
from promptflow._utils.logger_utils import LoggerFactory
from promptflow.azure._constants._flow import AUTOMATIC_RUNTIME, SESSION_CREATION_TIMEOUT_ENV_VAR
from promptflow.azure._restclient.flow import AzureMachineLearningDesignerServiceClient
from promptflow.azure._utils.gerneral import get_authorization, get_arm_token, get_aml_token
from promptflow.exceptions import UserErrorException, PromptflowException, SystemErrorException
logger = LoggerFactory.get_logger(__name__)
class FlowRequestException(SystemErrorException):
"""FlowRequestException."""
def __init__(self, message, **kwargs):
super().__init__(message, **kwargs)
class RequestTelemetryMixin(TelemetryMixin):
def __init__(self):
super().__init__()
self._refresh_request_id_for_telemetry()
self._from_cli = False
def _get_telemetry_values(self, *args, **kwargs):
return {"request_id": self._request_id, "from_cli": self._from_cli}
def _set_from_cli_for_telemetry(self):
self._from_cli = True
def _refresh_request_id_for_telemetry(self):
# refresh request id from current request id context
self._request_id = request_id_context.get() or str(uuid.uuid4())
def _request_wrapper():
"""Wrapper for request. Will refresh request id and pretty print exception."""
def exception_wrapper(func):
@wraps(func)
def wrapper(self, *args, **kwargs):
if not isinstance(self, RequestTelemetryMixin):
raise PromptflowException(f"Wrapped function is not RequestTelemetryMixin, got {type(self)}")
# Refresh the request id before each request.
self._refresh_request_id_for_telemetry()
try:
return func(self, *args, **kwargs)
except HttpResponseError as e:
raise FlowRequestException(
f"Calling {func.__name__} failed with request id: {self._request_id} \n"
f"Status code: {e.status_code} \n"
f"Reason: {e.reason} \n"
f"Error message: {e.message} \n"
)
return wrapper
return exception_wrapper
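# Illustrative sketch (the class below is hypothetical, not part of this module):
# any method on a RequestTelemetryMixin subclass can be wrapped with
# _request_wrapper() so that a fresh request id is issued per call and any
# HttpResponseError surfaces as a FlowRequestException carrying that request id.
#
#     class MyCaller(RequestTelemetryMixin):
#         @_request_wrapper()
#         def ping(self, *args, **kwargs):
#             ...  # any call that may raise azure.core.exceptions.HttpResponseError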
class FlowServiceCaller(RequestTelemetryMixin):
"""FlowServiceCaller.
:param workspace: workspace
:type workspace: Workspace
:param base_url: base service URL
:type base_url: str
"""
# The default namespace placeholder is used when namespace is None for get_module API.
DEFAULT_COMPONENT_NAMESPACE_PLACEHOLDER = "-"
DEFAULT_MODULE_WORKING_MECHANISM = "OutputToDataset"
DEFAULT_DATATYPE_MECHANISM = "RegisterBuildinDataTypeOnly"
FLOW_CLUSTER_ADDRESS = "FLOW_CLUSTER_ADDRESS"
WORKSPACE_INDEPENDENT_ENDPOINT_ADDRESS = "WORKSPACE_INDEPENDENT_ENDPOINT_ADDRESS"
DEFAULT_BASE_URL = "https://{}.api.azureml.ms"
MASTER_BASE_API = "https://master.api.azureml-test.ms"
DEFAULT_BASE_REGION = "westus2"
AML_USE_ARM_TOKEN = "AML_USE_ARM_TOKEN"
def __init__(self, workspace, credential, operation_scope, base_url=None, region=None, **kwargs):
"""Initializes DesignerServiceCaller."""
if "get_instance" != sys._getframe().f_back.f_code.co_name:
raise UserErrorException(
"Please use `_FlowServiceCallerFactory.get_instance()` to get service caller "
"instead of creating a new one."
)
super().__init__()
# self._service_context = workspace.service_context
if base_url is None:
# Handle the vnet scenario: its discovery URL will contain the workspace id after discovery.
base_url = workspace.discovery_url.split("discovery")[0]
# for dev test, change base url with environment variable
base_url = os.environ.get(self.FLOW_CLUSTER_ADDRESS, default=base_url)
self._workspace = workspace
self._operation_scope = operation_scope
self._service_endpoint = base_url
self._credential = credential
retry_policy = RetryPolicy()
# Stop retrying on 500 since it can cause 409 in the run creation scenario.
retry_policy._retry_on_status_codes.remove(500)
self.caller = AzureMachineLearningDesignerServiceClient(base_url=base_url, retry_policy=retry_policy, **kwargs)
def _get_headers(self):
custom_header = {
"Authorization": get_authorization(credential=self._credential),
"x-ms-client-request-id": self._request_id,
}
return custom_header
def _set_headers_with_user_aml_token(self, headers):
aml_token = get_aml_token(credential=self._credential)
headers["aml-user-token"] = aml_token
def _get_user_identity_info(self):
import jwt
token = get_arm_token(credential=self._credential)
decoded_token = jwt.decode(token, options={"verify_signature": False})
user_object_id, user_tenant_id = decoded_token["oid"], decoded_token["tid"]
return user_object_id, user_tenant_id
@cached_property
def _common_azure_url_pattern(self):
operation_scope = self._operation_scope
pattern = (
f"/subscriptions/{operation_scope.subscription_id}"
f"/resourceGroups/{operation_scope.resource_group_name}"
f"/providers/Microsoft.MachineLearningServices"
f"/workspaces/{operation_scope.workspace_name}"
)
return pattern
@_request_wrapper()
def create_flow(
self,
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
experiment_id=None, # type: Optional[str]
body=None, # type: Optional["_models.CreateFlowRequest"]
**kwargs, # type: Any
):
headers = self._get_headers()
return self.caller.flows.create_flow(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
experiment_id=experiment_id,
body=body,
headers=headers,
**kwargs,
)
@_request_wrapper()
def create_component_from_flow(
self,
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
body=None, # type: Optional["_models.LoadFlowAsComponentRequest"]
**kwargs, # type: Any
):
headers = self._get_headers()
try:
return self.caller.flows.load_as_component(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
body=body,
headers=headers,
**kwargs,
)
except ResourceExistsError:
return (
f"/subscriptions/{subscription_id}/resourceGroups/{resource_group_name}"
f"/providers/Microsoft.MachineLearningServices/workspaces/{workspace_name}"
f"/components/{body.component_name}/versions/{body.component_version}"
)
@_request_wrapper()
def list_flows(
self,
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
experiment_id=None, # type: Optional[str]
owned_only=None, # type: Optional[bool]
flow_type=None, # type: Optional[Union[str, "_models.FlowType"]]
list_view_type=None, # type: Optional[Union[str, "_models.ListViewType"]]
**kwargs, # type: Any
):
headers = self._get_headers()
return self.caller.flows.list_flows(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
experiment_id=experiment_id,
owned_only=owned_only,
flow_type=flow_type,
list_view_type=list_view_type,
headers=headers,
**kwargs,
)
@_request_wrapper()
def submit_flow(
self,
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
experiment_id, # type: str
endpoint_name=None, # type: Optional[str]
body=None, # type: Optional["_models.SubmitFlowRequest"]
**kwargs, # type: Any
):
headers = self._get_headers()
return self.caller.flows.submit_flow(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
experiment_id=experiment_id,
endpoint_name=endpoint_name,
body=body,
headers=headers,
**kwargs,
)
@_request_wrapper()
def get_flow(
self,
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
flow_id, # type: str
experiment_id, # type: str
**kwargs, # type: Any
):
headers = self._get_headers()
return self.caller.flows.get_flow(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
experiment_id=experiment_id,
flow_id=flow_id,
headers=headers,
**kwargs,
)
@_request_wrapper()
def get_flow_run(
self,
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
flow_run_id, # type: str
**kwargs, # type: Any
):
"""Get flow run."""
headers = self._get_headers()
return self.caller.bulk_runs.get_flow_run_info(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
flow_run_id=flow_run_id,
headers=headers,
**kwargs,
)
@_request_wrapper()
def create_connection(
self,
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
connection_name, # type: str
body=None, # type: Optional["_models.CreateOrUpdateConnectionRequest"]
**kwargs, # type: Any
):
headers = self._get_headers()
return self.caller.connections.create_connection(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
connection_name=connection_name,
body=body,
headers=headers,
**kwargs,
)
@_request_wrapper()
def update_connection(
self,
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
connection_name, # type: str
body=None, # type: Optional["_models.CreateOrUpdateConnectionRequestDto"]
**kwargs, # type: Any
):
headers = self._get_headers()
return self.caller.connections.update_connection(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
connection_name=connection_name,
body=body,
headers=headers,
**kwargs,
)
@_request_wrapper()
def get_connection(
self,
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
connection_name, # type: str
**kwargs, # type: Any
):
headers = self._get_headers()
return self.caller.connections.get_connection(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
connection_name=connection_name,
headers=headers,
**kwargs,
)
@_request_wrapper()
def delete_connection(
self,
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
connection_name, # type: str
**kwargs, # type: Any
):
headers = self._get_headers()
return self.caller.connections.delete_connection(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
connection_name=connection_name,
headers=headers,
**kwargs,
)
@_request_wrapper()
def list_connections(
self,
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
**kwargs, # type: Any
):
headers = self._get_headers()
return self.caller.connections.list_connections(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
headers=headers,
**kwargs,
)
@_request_wrapper()
def list_connection_specs(
self,
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
**kwargs, # type: Any
):
headers = self._get_headers()
return self.caller.connections.list_connection_specs(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
headers=headers,
**kwargs,
)
@_request_wrapper()
def submit_bulk_run(
self,
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
body=None, # type: Optional["_models.SubmitBulkRunRequest"]
**kwargs, # type: Any
):
"""submit_bulk_run.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param body:
:type body: ~flow.models.SubmitBulkRunRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:return: str, or the result of cls(response)
:rtype: str
:raises: ~azure.core.exceptions.HttpResponseError
"""
headers = self._get_headers()
# pass user aml token to flow run submission
self._set_headers_with_user_aml_token(headers)
return self.caller.bulk_runs.submit_bulk_run(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
headers=headers,
body=body,
**kwargs,
)
@_request_wrapper()
def create_flow_session(
self,
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
session_id, # type: str
body, # type: Optional["_models.CreateFlowSessionRequest"]
**kwargs, # type: Any
):
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
map_error,
)
from promptflow.azure._restclient.flow.operations._flow_sessions_operations import (
build_create_flow_session_request,
_convert_request,
_models,
)
from promptflow.azure._constants._flow import SESSION_CREATION_TIMEOUT_SECONDS
from promptflow.azure._restclient.flow.models import SetupFlowSessionAction
headers = self._get_headers()
# Pass the user AML token to session creation so the user doesn't need to authenticate again in CI.
self._set_headers_with_user_aml_token(headers)
# Do not call self.caller.flow_sessions.create_flow_session here because it does not return response headers.
cls = kwargs.pop("cls", None) # type: ClsType[Any]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
_json = self.caller.flow_sessions._serialize.body(body, "CreateFlowSessionRequest")
request = build_create_flow_session_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
session_id=session_id,
content_type=content_type,
json=_json,
template_url=self.caller.flow_sessions.create_flow_session.metadata["url"],
headers=headers,
)
request = _convert_request(request)
request.url = self.caller.flow_sessions._client.format_url(request.url)
pipeline_response = self.caller.flow_sessions._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self.caller.flow_sessions._deserialize.failsafe_deserialize(
_models.ErrorResponse, pipeline_response
)
raise HttpResponseError(response=response, model=error)
if response.status_code == 200:
return
action = body.action or SetupFlowSessionAction.INSTALL.value
if action == SetupFlowSessionAction.INSTALL.value:
action = "creation"
else:
action = "reset"
logger.info(f"Start polling until session {action} is completed...")
# start polling status here.
if "azure-asyncoperation" not in response.headers:
raise FlowRequestException(
"No polling url found in response headers. "
f"Request id: {headers['x-ms-client-request-id']}. "
f"Response headers: {response.headers}."
)
polling_url = response.headers["azure-asyncoperation"]
time_run = 0
sleep_period = 5
status = None
timeout_seconds = SESSION_CREATION_TIMEOUT_SECONDS
# Polling timeout: if the user set the session creation timeout environment variable, use its value.
if os.environ.get(SESSION_CREATION_TIMEOUT_ENV_VAR):
try:
timeout_seconds = float(os.environ.get(SESSION_CREATION_TIMEOUT_ENV_VAR))
except ValueError:
raise UserErrorException(
"Environment variable {} with value {} set but failed to parse. "
"Please reset the value to a number.".format(
SESSION_CREATION_TIMEOUT_ENV_VAR, os.environ.get(SESSION_CREATION_TIMEOUT_ENV_VAR)
)
)
# InProgress is the only known non-terminal status for now.
while status in [None, "InProgress"]:
if time_run + sleep_period > timeout_seconds:
message = (
f"Polling timeout for session {session_id} {action} "
f"for {AUTOMATIC_RUNTIME} after {timeout_seconds} seconds.\n"
f"To proceed the {action} for {AUTOMATIC_RUNTIME}, you can retry using the same flow, "
"and we will continue polling status of previous session. \n"
)
raise Exception(message)
time_run += sleep_period
time.sleep(sleep_period)
response = self.poll_operation_status(url=polling_url, **kwargs)
status = response["status"]
logger.debug(f"Current polling status: {status}")
if time_run % 30 == 0:
# print the message every 30 seconds to avoid users feeling stuck during the operation
print(f"Waiting for session {action}, current status: {status}")
else:
logger.debug(f"Waiting for session {action}, current status: {status}")
if status == "Succeeded":
error_msg = pydash.get(response, "error.message", None)
if error_msg:
logger.warning(
f"Session {action} finished with status {status}. "
f"But there are warnings when installing the packages: {error_msg}."
)
else:
logger.info(f"Session {action} finished with status {status}.")
else:
# refine response error message
try:
response["error"]["message"] = json.loads(response["error"]["message"])
except Exception:
pass
raise FlowRequestException(
f"Session {action} failed for {session_id}. \n"
f"Session {action} status: {status}. \n"
f"Request id: {headers['x-ms-client-request-id']}. \n"
f"{json.dumps(response, indent=2)}."
)
@_request_wrapper()
def poll_operation_status(
self, url, **kwargs # type: Any
):
from azure.core.rest import HttpRequest
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
map_error,
)
from promptflow.azure._restclient.flow.operations._flow_sessions_operations import _models
headers = self._get_headers()
request = HttpRequest(method="GET", url=url, headers=headers, **kwargs)
pipeline_response = self.caller.flow_sessions._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self.caller.flow_sessions._deserialize.failsafe_deserialize(
_models.ErrorResponse, pipeline_response
)
raise HttpResponseError(response=response, model=error)
deserialized = self.caller.flow_sessions._deserialize("object", pipeline_response)
if "status" not in deserialized:
raise FlowRequestException(
f"Status not found in response. Request id: {headers['x-ms-client-request-id']}. "
f"Response headers: {response.headers}."
)
return deserialized
@_request_wrapper()
def get_child_runs(
self,
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
flow_run_id, # type: str
index=None, # type: Optional[int]
start_index=None, # type: Optional[int]
end_index=None, # type: Optional[int]
**kwargs, # type: Any
):
"""Get child runs of a flow run."""
headers = self._get_headers()
return self.caller.bulk_runs.get_flow_child_runs(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
flow_run_id=flow_run_id,
index=index,
start_index=start_index,
end_index=end_index,
headers=headers,
**kwargs,
)
@_request_wrapper()
def cancel_flow_run(
self,
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
flow_run_id, # type: str
**kwargs, # type: Any
):
"""Cancel a flow run."""
headers = self._get_headers()
return self.caller.bulk_runs.cancel_flow_run(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
flow_run_id=flow_run_id,
headers=headers,
**kwargs,
)
@_request_wrapper()
def get_cosmos_resource_token(
self,
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
container_name, # type: str
acquire_write=False, # type: Optional[bool]
**kwargs, # type: Any
):
"""Get Cosmos resource token."""
headers = self._get_headers()
return self.caller.trace_sessions.get_cosmos_resource_token(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
container_name=container_name,
acquire_write=acquire_write,
headers=headers,
**kwargs,
)
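# Standalone sketch of the polling pattern used by create_flow_session above
# (the function name and the status callable are hypothetical, not part of this
# client): poll until the status leaves the non-terminal "InProgress" state or a
# timeout elapses, sleeping a fixed period between checks.
#
#     def poll_until_terminal(get_status, timeout_seconds=600.0, sleep_period=5):
#         time_run, status = 0, None
#         while status in (None, "InProgress"):
#             if time_run + sleep_period > timeout_seconds:
#                 raise TimeoutError(f"Polling timed out after {timeout_seconds}s")
#             time_run += sleep_period
#             time.sleep(sleep_period)
#             status = get_status()
#         return status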
# --- promptflow/src/promptflow/promptflow/azure/_restclient/flow_service_caller.py ---
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from typing import Dict
from azure.ai.ml._scope_dependent_operations import OperationConfig, OperationScope, _ScopeDependentOperations
from promptflow._sdk._telemetry import ActivityType, WorkspaceTelemetryMixin, monitor_operation
from promptflow.azure._restclient.flow_service_caller import FlowServiceCaller
class TraceOperations(WorkspaceTelemetryMixin, _ScopeDependentOperations):
""""""
def __init__(
self,
operation_scope: OperationScope,
operation_config: OperationConfig,
service_caller: FlowServiceCaller,
**kwargs: Dict,
):
super().__init__(
operation_scope=operation_scope,
operation_config=operation_config,
workspace_name=operation_scope.workspace_name,
subscription_id=operation_scope.subscription_id,
resource_group_name=operation_scope.resource_group_name,
)
self._service_caller = service_caller
@monitor_operation(activity_name="pfazure.traces._get_cosmos_db_token", activity_type=ActivityType.INTERNALCALL)
def _get_cosmos_db_token(self, container_name: str, acquire_write: bool = False) -> str:
return self._service_caller.get_cosmos_resource_token(
subscription_id=self._operation_scope.subscription_id,
resource_group_name=self._operation_scope.resource_group_name,
workspace_name=self._operation_scope.workspace_name,
container_name=container_name,
acquire_write=acquire_write,
)
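# Illustrative sketch (the client object and container name below are assumptions
# for demonstration, not part of this module): once wired up by the PFClient
# machinery, this operation can mint a scoped Cosmos DB resource token.
#
#     token = trace_operations._get_cosmos_db_token(container_name="Span", acquire_write=False)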
# --- promptflow/src/promptflow/promptflow/azure/operations/_trace_operations.py ---
import base64
import filetype
import hashlib
from typing import Callable, Optional
class PFBytes(bytes):
"""This class is used to represent a bytes object in PromptFlow.
It has all the functionalities of a bytes object,
and also has some additional methods to help with serialization and deserialization.
"""
def __new__(cls, value: bytes, *args, **kwargs):
# Here we must only pass the value to the bytes constructor,
# otherwise we will get a type error that the constructor doesn't take such args.
# See https://docs.python.org/3/reference/datamodel.html#object.__new__
return super().__new__(cls, value)
def __init__(self, value: bytes, mime_type: str, source_url: Optional[str] = None):
# Here the first argument should also be "value", the same as __new__.
# Otherwise we will get an error when initializing the object.
super().__init__()
# Use this hash to identify these bytes.
self._hash = hashlib.sha1(value).hexdigest()[:8]
self._mime_type = mime_type.lower()
self._source_url = source_url
@property
def source_url(self):
return self._source_url
def to_base64(self, with_type: bool = False, dict_type: bool = False):
"""Returns the base64 representation of the PFBytes."""
if with_type:
if not dict_type:
return f"data:{self._mime_type};base64," + base64.b64encode(self).decode("utf-8")
return {f"data:{self._mime_type};base64": base64.b64encode(self).decode("utf-8")}
return base64.b64encode(self).decode("utf-8")
class Image(PFBytes):
"""This class is used to represent an image in PromptFlow. It is a subclass of
~promptflow.contracts.multimedia.PFBytes.
"""
def __init__(self, value: bytes, mime_type: str = None, source_url: Optional[str] = None):
if mime_type is None:
mime_type = filetype.guess_mime(value)
if mime_type is None or not mime_type.startswith("image/"):
mime_type = "image/*"
return super().__init__(value, mime_type, source_url)
def __str__(self):
return f"Image({self._hash})"
def __repr__(self) -> str:
return f"Image({self._hash})"
def serialize(self, encoder: Callable = None):
"""Serialize the image to a dictionary."""
if encoder is None:
return self.__str__()
return encoder(self)
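# A minimal usage sketch; the payload below is a tiny fake PNG header, not a
# real image file, and is used here only to demonstrate the API surface.
if __name__ == "__main__":
    img = Image(b"\x89PNG\r\n\x1a\n" + b"\x00" * 8, mime_type="image/png")
    print(img)  # e.g. Image(<8-char sha1 prefix>)
    print(img.to_base64()[:16])  # raw base64 string
    print(img.to_base64(with_type=True).startswith("data:image/png;base64,"))  # True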
# --- promptflow/src/promptflow/promptflow/contracts/multimedia.py ---
import contextvars
import multiprocessing
import os
import queue
import signal
import sys
import threading
import time
from datetime import datetime
from functools import partial
from logging import INFO
from multiprocessing import Manager, Queue
from multiprocessing.pool import ThreadPool
from typing import List, Optional, Union
import psutil
from promptflow._constants import LINE_NUMBER_KEY, LINE_TIMEOUT_SEC
from promptflow._core._errors import ProcessPoolError, UnexpectedError
from promptflow._core.operation_context import OperationContext
from promptflow._core.run_tracker import RunTracker
from promptflow._utils.dataclass_serializer import convert_eager_flow_output_to_dict
from promptflow._utils.exception_utils import ExceptionPresenter
from promptflow._utils.logger_utils import bulk_logger
from promptflow._utils.multimedia_utils import _process_recursively, persist_multimedia_data
from promptflow._utils.thread_utils import RepeatLogTimer
from promptflow._utils.utils import log_progress, set_context
from promptflow.contracts.multimedia import Image
from promptflow.contracts.run_info import FlowRunInfo
from promptflow.contracts.run_info import RunInfo as NodeRunInfo
from promptflow.contracts.run_info import Status
from promptflow.exceptions import ErrorTarget, PromptflowException
from promptflow.executor._errors import (
BatchExecutionTimeoutError,
LineExecutionTimeoutError,
ProcessCrashError,
ProcessInfoObtainedTimeout,
ProcessTerminatedTimeout,
)
from promptflow.executor._process_manager import ForkProcessManager, SpawnProcessManager
from promptflow.executor._result import LineResult
from promptflow.executor._script_executor import ScriptExecutor
from promptflow.executor.flow_executor import DEFAULT_CONCURRENCY_BULK, FlowExecutor
from promptflow.storage import AbstractRunStorage
def signal_handler(signum, frame):
signame = signal.Signals(signum).name
bulk_logger.info("Execution stopping. Handling signal %s (%s)", signame, signum)
try:
process = psutil.Process(os.getpid())
bulk_logger.info("Terminating process with pid %s", process.pid)
process.terminate()
except Exception:
bulk_logger.warning("Error when handling execution stop signal", exc_info=True)
finally:
sys.exit(1)
class QueueRunStorage(AbstractRunStorage):
"""This storage persists run info by putting it into a queue."""
def __init__(self, queue: Queue):
self.queue = queue
def persist_node_run(self, run_info: NodeRunInfo):
self.queue.put(run_info)
def persist_flow_run(self, run_info: FlowRunInfo):
self.queue.put(run_info)
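# Illustrative sketch (the run info object below is hypothetical): a worker
# process writes run info through QueueRunStorage while the parent drains the
# same queue.
#
#     q = multiprocessing.Queue()
#     storage = QueueRunStorage(q)
#     storage.persist_node_run(node_run_info)  # worker side
#     message = q.get()                        # parent side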
def format_current_process_info(process_name, pid, line_number: int):
return f"Process name({process_name})-Process id({pid})-Line number({line_number})"
def log_process_status(process_name, pid, line_number: int, is_completed=False, is_failed=False):
process_info = format_current_process_info(process_name, pid, line_number)
if is_completed:
bulk_logger.info(f"{process_info} completed.")
elif is_failed:
bulk_logger.info(f"{process_info} failed.")
else:
bulk_logger.info(f"{process_info} start execution.")
class LineExecutionProcessPool:
_DEFAULT_WORKER_COUNT = 4
_PROCESS_TERMINATED_TIMEOUT = 60
_PROCESS_INFO_OBTAINED_TIMEOUT = 60
def __init__(
self,
flow_executor: FlowExecutor,
nlines,
run_id,
output_dir,
batch_timeout_sec: Optional[int] = None,
line_timeout_sec: Optional[int] = None,
worker_count: Optional[int] = None,
):
self._nlines = nlines
self._run_id = run_id
multiprocessing_start_method = os.environ.get("PF_BATCH_METHOD", multiprocessing.get_start_method())
sys_start_methods = multiprocessing.get_all_start_methods()
if multiprocessing_start_method not in sys_start_methods:
bulk_logger.warning(
f"Failed to set start method to '{multiprocessing_start_method}', "
f"start method {multiprocessing_start_method} is not in: {sys_start_methods}."
)
bulk_logger.info(f"Set start method to default {multiprocessing.get_start_method()}.")
multiprocessing_start_method = multiprocessing.get_start_method()
use_fork = multiprocessing_start_method in ["fork", "forkserver"]
self._flow_file = flow_executor._flow_file
self._connections = flow_executor._connections
self._working_dir = flow_executor._working_dir
self._use_fork = use_fork
if isinstance(flow_executor, ScriptExecutor):
self._storage = flow_executor._storage
else:
self._storage = flow_executor._run_tracker._storage
self._flow_id = flow_executor._flow_id
self._log_interval = flow_executor._log_interval
self._line_timeout_sec = line_timeout_sec or LINE_TIMEOUT_SEC
self._batch_timeout_sec = batch_timeout_sec
self._output_dir = output_dir
self._flow_create_kwargs = {
"flow_file": flow_executor._flow_file,
"connections": flow_executor._connections,
"working_dir": flow_executor._working_dir,
"line_timeout_sec": self._line_timeout_sec,
"raise_ex": False,
}
# Will be set to True if the batch run times out.
self._is_timeout = False
self._worker_count = self._determine_worker_count(worker_count)
def __enter__(self):
manager = Manager()
self._processing_idx = manager.dict()
self._completed_idx = manager.dict()
self._task_queue = Queue()
self._n_process = self._worker_count
# When using fork, we first spawn a sub process, and a SemLock created in a fork context
# (multiprocessing.Queue()) can't be used in a spawn context. Since spawn does not share memory,
# synchronization primitives created by fork cannot be used directly; doing so raises:
# "A SemLock created in a fork context is being shared with a process in a spawn context.
# This is not supported".
# So use multiprocessing.Manager().Queue() instead of multiprocessing.Queue().
# Manager().Queue() operates through a manager server process, which passes messages between
# processes without directly sharing memory state, making it safe to use in a spawn context.
self._input_queues = [manager.Queue() for _ in range(self._n_process)]
self._output_queues = [manager.Queue() for _ in range(self._n_process)]
self._control_signal_queue = manager.Queue()
self._process_info = manager.dict()
# When using fork, we first create a process with the spawn method to establish a clean environment,
# then fork subprocesses in this environment to avoid some deadlock problems.
common_kwargs = {
"input_queues": self._input_queues,
"output_queues": self._output_queues,
"process_info": self._process_info,
"process_target_func": _process_wrapper,
}
if self._use_fork:
# 1. Create input_queue, output_queue, control_signal_queue and _process_info in the main process.
# 2. Pass the above queue/dict as parameters to spawn and fork processes to transfer information
# between processes.
self._processes_manager = ForkProcessManager(
self._control_signal_queue,
self._flow_create_kwargs,
**common_kwargs,
)
else:
executor_creation_func = partial(FlowExecutor.create, **self._flow_create_kwargs)
# 1. Create input_queue, output_queue, and _process_info in the main process.
# 2. Spawn _n_process sub-processes and pass the above queues/dict to them to transfer information
# between the main process and the sub-processes.
self._processes_manager = SpawnProcessManager(executor_creation_func, **common_kwargs)
self._processes_manager.start_processes()
self._processes_manager.ensure_healthy()
monitor_pool = ThreadPool(self._n_process, initializer=set_context, initargs=(contextvars.copy_context(),))
self._monitor_pool = monitor_pool
return self
def __exit__(self, exc_type, exc_val, exc_tb):
if self._monitor_pool is not None:
self._monitor_pool.close()
self._monitor_pool.join()
@property
def is_timeout(self):
return self._is_timeout
def _get_process_info(self, index):
start_time = time.time()
while True:
self._processes_manager.ensure_healthy()
try:
if time.time() - start_time > self._PROCESS_INFO_OBTAINED_TIMEOUT:
raise ProcessInfoObtainedTimeout(self._PROCESS_INFO_OBTAINED_TIMEOUT)
# Try to get process id and name from the process_info
process_id = self._process_info[index].process_id
process_name = self._process_info[index].process_name
return (index, process_id, process_name)
except KeyError:
# If the process_info does not exist for the given index, the process is not ready yet;
# try again.
time.sleep(1)
continue
except Exception as e:
raise Exception(f"Unexpected error occurred while get process info. Exception: {e}")
def _ensure_process_terminated_within_timeout(self, process_id):
start_time = time.time()
while psutil.pid_exists(process_id):
if time.time() - start_time > self._PROCESS_TERMINATED_TIMEOUT:
raise ProcessTerminatedTimeout(self._PROCESS_TERMINATED_TIMEOUT)
time.sleep(1)
def _is_process_alive(self, process_id):
return psutil.pid_exists(process_id)
def _handle_output_queue_messages(self, output_queue: Queue, result_list: List[LineResult]):
try:
message = output_queue.get(timeout=1)
if isinstance(message, LineResult):
message = self._process_multimedia(message)
result_list.append(message)
return message
elif isinstance(message, FlowRunInfo):
self._storage.persist_flow_run(message)
return message
elif isinstance(message, NodeRunInfo):
self._storage.persist_node_run(message)
return message
except queue.Empty:
pass
return None
def _monitor_workers_and_process_tasks_in_thread(
self,
task_queue: Queue,
result_list: List[LineResult],
index: int,
input_queue: Queue,
output_queue: Queue,
batch_start_time: datetime,
):
index, process_id, process_name = self._get_process_info(index)
# Entering the while loop requires two conditions:
# 1. The task queue is not empty, meaning there are lines yet to be executed.
# 2. The batch run has not reached the batch timeout limit.
while not self._batch_timeout_expired(batch_start_time):
self._processes_manager.ensure_healthy()
try:
# Get task from task_queue
inputs, line_number, run_id = task_queue.get(timeout=1)
except queue.Empty:
break
# Calculate the line timeout for the current line.
line_timeout_sec = self._line_timeout_sec
if self._batch_timeout_sec:
remaining_execution_time = (
self._batch_timeout_sec - (datetime.utcnow() - batch_start_time).total_seconds()
)
if remaining_execution_time <= 0:
self._is_timeout = True
break
line_timeout_sec = min(line_timeout_sec, remaining_execution_time)
# Put task into input_queue
args = (inputs, line_number, run_id, line_timeout_sec)
input_queue.put(args)
self._processing_idx[line_number] = format_current_process_info(process_name, process_id, line_number)
log_process_status(process_name, process_id, line_number)
start_time = datetime.utcnow()
completed = False
crashed = False
returned_node_run_infos = {}
# Responsible for checking the output queue messages and processing them within a specified timeout period.
while not self._batch_timeout_expired(batch_start_time) and not self._line_timeout_expired(start_time):
# Monitor process aliveness.
crashed = not self._is_process_alive(process_id)
if crashed:
break
# Handle output queue message.
message = self._handle_output_queue_messages(output_queue, result_list)
if isinstance(message, LineResult):
completed = True
break
if isinstance(message, NodeRunInfo):
returned_node_run_infos[message.node] = message
# Handle line execution completed.
if completed:
self._completed_idx[line_number] = format_current_process_info(process_name, process_id, line_number)
log_process_status(process_name, process_id, line_number, is_completed=True)
# Handle line execution is not completed.
else:
ex = None
# Handle process crashed.
if crashed:
bulk_logger.warning(f"Process crashed while executing line {line_number}.")
ex = ProcessCrashError(line_number)
else:
# Handle line execution timeout.
if self._line_timeout_expired(start_time):
bulk_logger.warning(f"Line {line_number} timeout after {self._line_timeout_sec} seconds.")
ex = LineExecutionTimeoutError(line_number, self._line_timeout_sec)
# Handle batch execution timeout.
if self._batch_timeout_expired(batch_start_time):
bulk_logger.warning(
f"Line {line_number} execution terminated due to the total "
f"batch run exceeding the batch timeout ({self._batch_timeout_sec}s)."
)
ex = BatchExecutionTimeoutError(line_number, self._batch_timeout_sec)
# Set is_timeout to True if the batch run exceeds the batch timeout.
self._is_timeout = True
# This branch should not be reached; add this warning just in case.
if ex is None:
msg = f"Unexpected error occurred while monitoring line execution at line {line_number}."
bulk_logger.warning(msg)
ex = UnexpectedError(msg)
result = self._generate_line_result_for_exception(
inputs,
run_id,
line_number,
self._flow_id,
start_time,
ex,
returned_node_run_infos,
)
result_list.append(result)
self._completed_idx[line_number] = format_current_process_info(process_name, process_id, line_number)
log_process_status(process_name, process_id, line_number, is_failed=True)
# If there are still tasks in the task_queue and the batch run does not exceed the batch timeout,
# restart a new process to execute the task.
run_finished = task_queue.empty() or self._batch_timeout_expired(batch_start_time)
if not run_finished:
self._processes_manager.restart_process(index)
# We need to ensure the process has been killed before continuing to execute.
# Otherwise the process may receive a new task and then be killed mid-execution,
# which would result in a 'ProcessCrashError'.
self._ensure_process_terminated_within_timeout(process_id)
index, process_id, process_name = self._get_process_info(index)
self._processing_idx.pop(line_number)
# If the while loop exits due to batch run timeout, we should set is_timeout to True if we didn't set it before.
self._is_timeout = self._is_timeout or self._batch_timeout_expired(batch_start_time)
# End the process when the batch timeout is exceeded or when all lines have been executed.
self._processes_manager.end_process(index)
# In fork mode, the main process and the sub spawn process communicate through _process_info.
# We need to ensure the process has been killed before returning. Otherwise, the main
# process may have exited while the spawn process is still alive,
# in which case a connection error will be reported.
self._ensure_process_terminated_within_timeout(process_id)
def _batch_timeout_expired(self, start_time: datetime) -> bool:
if self._batch_timeout_sec is None:
return False
return (datetime.utcnow() - start_time).total_seconds() > self._batch_timeout_sec + 10
def _line_timeout_expired(self, start_time: datetime) -> bool:
# Here we add a few more seconds for the following reasons:
# 1. At the last second, there may be several timeout messages from exec_line.
# 2. It may take time to create a worker, so the actual timeout may be longer.
return (datetime.utcnow() - start_time).total_seconds() > self._line_timeout_sec + 10
def _process_multimedia(self, result: LineResult) -> LineResult:
"""Replace multimedia data in line result with string place holder to prevent OOM
and persist multimedia data in output when batch running."""
if not self._output_dir:
return result
self._process_multimedia_in_flow_run(result.run_info)
for node_name, node_run_info in result.node_run_infos.items():
result.node_run_infos[node_name] = self._process_multimedia_in_node_run(node_run_info)
result.output = persist_multimedia_data(result.output, self._output_dir)
return result
def _process_multimedia_in_run_info(self, run_info: Union[FlowRunInfo, NodeRunInfo]):
# Persist and convert images in inputs to path dictionaries.
# This replaces any image objects with their corresponding file path dictionaries.
if run_info.inputs:
run_info.inputs = self._persist_and_convert_images_to_path_dicts(run_info.inputs)
# Persist and convert images in output to path dictionaries.
# This replaces any image objects with their corresponding file path dictionaries.
if run_info.output:
serialized_output = self._persist_and_convert_images_to_path_dicts(run_info.output)
run_info.output = serialized_output
run_info.result = None
# Persist and convert images in api_calls to path dictionaries.
# The `inplace=True` parameter is used here to ensure that the original list structure holding generator outputs
# is maintained. This allows us to keep tracking the list as it dynamically changes when the generator is
# consumed. It is crucial to process the api_calls list in place to avoid losing the reference to the list that
# holds the generator items, which is essential for tracing generator execution.
if run_info.api_calls:
run_info.api_calls = self._persist_and_convert_images_to_path_dicts(run_info.api_calls, inplace=True)
return run_info
def _process_multimedia_in_flow_run(self, run_info: FlowRunInfo):
self._process_multimedia_in_run_info(run_info)
def _process_multimedia_in_node_run(self, run_info: NodeRunInfo):
run_info = self._process_multimedia_in_run_info(run_info)
return run_info
def _persist_and_convert_images_to_path_dicts(self, value, inplace=False):
serialization_funcs = {Image: partial(Image.serialize, **{"encoder": None})}
return _process_recursively(value, process_funcs=serialization_funcs, inplace=inplace)
def _generate_line_result_for_exception(
self,
inputs,
run_id,
line_number,
flow_id,
start_time,
ex,
node_run_infos={},
) -> LineResult:
bulk_logger.error(f"Line {line_number}, Process {os.getpid()} failed with exception: {ex}")
run_info = FlowRunInfo(
run_id=f"{run_id}_{line_number}",
status=Status.Failed,
error=ExceptionPresenter.create(ex).to_dict(include_debug_info=True),
inputs=inputs,
output=None,
metrics=None,
request=None,
parent_run_id=run_id,
root_run_id=run_id,
source_run_id=None,
flow_id=flow_id,
start_time=start_time,
end_time=datetime.utcnow(),
index=line_number,
)
result = LineResult(
output={},
aggregation_inputs={},
run_info=run_info,
node_run_infos=node_run_infos,
)
# TODO: There is a corner case where the run info is persisted in the subprocess on timeout,
# while we also persist the run info here. This may cause duplicate run info in the storage.
# We need to find a way to avoid this.
self._storage.persist_flow_run(result.run_info)
return result
def run(self, batch_inputs):
for index, inputs in batch_inputs:
self._task_queue.put(
(
inputs,
index,
self._run_id,
)
)
result_list = []
run_start_time = datetime.utcnow()
with RepeatLogTimer(
interval_seconds=self._log_interval,
logger=bulk_logger,
level=INFO,
log_message_function=self._generate_thread_status_messages,
args=(
self._monitor_pool,
self._nlines,
),
):
try:
batch_start_time = datetime.utcnow()
args_list = [
(
self._task_queue, # Shared task queue for all sub processes to read the input data.
result_list, # Line result list of the batch run.
i, # Index of the sub process.
# Specific input queue for sub process, used to send input data to it.
self._input_queues[i],
# Specific output queue for the sub process, used to receive results from it.
self._output_queues[i],
batch_start_time,
)
for i in range(self._n_process)
]
# The variable 'async_result' here is not the actual result of the batch run
# but an AsyncResult object that can be used to check whether the execution has finished.
# The actual results of the batch run are stored in 'result_list'.
# Create _n_process monitoring threads, mainly used to assign tasks and receive line results.
# When the task_queue is empty, end the process.
# When a line execution times out or a process crashes, restart the process.
async_result = self._monitor_pool.starmap_async(
self._monitor_workers_and_process_tasks_in_thread, args_list
)
try:
# Only log when the number of results changes to avoid duplicate logging.
last_log_count = 0
# Wait for batch run to complete or KeyboardInterrupt
while not async_result.ready():
current_result_count = len(result_list)
if current_result_count != last_log_count:
log_progress(
run_start_time=run_start_time,
logger=bulk_logger,
count=len(result_list),
total_count=self._nlines,
)
last_log_count = current_result_count
# Check every 1 second
async_result.wait(1)
# To ensure exceptions in thread-pool calls are propagated to the main process for proper handling
# The exceptions raised will be re-raised by the get() method.
# Related link:
# https://docs.python.org/3/library/multiprocessing.html#multiprocessing.pool.AsyncResult
async_result.get()
except KeyboardInterrupt:
raise
except PromptflowException:
raise
except Exception as e:
bulk_logger.error(f"ProcessPool failed with exception: {e}")
raise ProcessPoolError(
message_format=f"ProcessPool failed with exception: {e}",
target=ErrorTarget.EXECUTOR,
) from e
return result_list
def _generate_thread_status_messages(self, pool: ThreadPool, total_count: int):
msgs = []
active_threads = sum(thread.is_alive() for thread in pool._pool)
msgs.append(f"[Process Pool] [Active processes: {active_threads} / {len(pool._pool)}]")
processing_lines_copy = self._processing_idx.copy()
completed_lines_copy = self._completed_idx.copy()
msgs.append(
f"[Lines] [Finished: {len(completed_lines_copy)}] [Processing: {len(processing_lines_copy)}] "
f"[Pending: {total_count - len(processing_lines_copy) - len(completed_lines_copy)}]"
)
lines = []
for idx, thread_name in sorted(processing_lines_copy.items()):
lines.append(f"line {idx} ({thread_name})")
if len(lines) > 0:
msgs.append("Processing Lines: " + ", ".join(lines) + ".")
return msgs
def _determine_worker_count(self, worker_count):
# Starting a new process in non-fork mode requires allocating memory.
# Calculate the maximum number of processes based on available memory to avoid memory bursting.
estimated_available_worker_count = get_available_max_worker_count() if not self._use_fork else None
# If worker_count (usually taken from the PF_WORKER_COUNT environment variable) is set and valid, use it.
if worker_count is not None and worker_count > 0:
self._log_set_worker_count(worker_count, estimated_available_worker_count)
return worker_count
# If the environment variable PF_WORKER_COUNT is not set or invalid, take the minimum value among the
# factors: default_worker_count, row_count and estimated_worker_count_based_on_memory_usage
factors = {
"default_worker_count": self._DEFAULT_WORKER_COUNT,
"row_count": self._nlines,
"estimated_worker_count_based_on_memory_usage": estimated_available_worker_count,
}
valid_factors = {k: v for k, v in factors.items() if v is not None and v > 0}
# Take the minimum value as the result
worker_count = min(valid_factors.values())
bulk_logger.info(
f"Set process count to {worker_count} by taking the minimum value among the factors of {valid_factors}."
)
return worker_count
def _log_set_worker_count(self, worker_count, estimated_available_worker_count):
bulk_logger.info(f"Set process count to {worker_count}.")
if estimated_available_worker_count is not None and estimated_available_worker_count < worker_count:
bulk_logger.warning(
f"The current process count ({worker_count}) is larger than recommended process count "
f"({estimated_available_worker_count}) that estimated by system available memory. This may "
f"cause memory exhaustion"
)
def _exec_line(
executor: FlowExecutor, output_queue: Queue, *, inputs: dict, run_id: str, index: int, line_timeout_sec: int
):
try:
line_result = executor.exec_line(
inputs=inputs,
run_id=run_id,
index=index,
node_concurrency=DEFAULT_CONCURRENCY_BULK,
line_timeout_sec=line_timeout_sec,
)
if line_result is not None:
# For eager flow, the output may be a dataclass which is not picklable, so we need to convert it to a dict.
if not isinstance(line_result.output, dict):
line_result.output = convert_eager_flow_output_to_dict(line_result.output)
line_result.output.pop(LINE_NUMBER_KEY, None)
# TODO: Put serialized line result into queue to catch serialization error beforehand.
# Otherwise it might cause the process to hang, e.g., a line failed because its output is not serializable.
if line_result is not None and line_result.run_info.status == Status.Failed:
line_result.output = {}
return line_result
except Exception as e:
bulk_logger.error(f"Line {index}, Process {os.getpid()} failed with exception: {e}")
flow_id = executor._flow_id
line_run_id = run_id if index is None else f"{run_id}_{index}"
# If line execution failed before starting, there is no flow information in the run_tracker.
# So we call start_flow_run before handling the exception to make sure the run_tracker has flow info.
if isinstance(executor, ScriptExecutor):
run_tracker = RunTracker(executor._storage)
else:
run_tracker = executor._run_tracker
run_tracker.start_flow_run(flow_id, run_id, line_run_id, run_id)
run_info = run_tracker.end_run(f"{run_id}_{index}", ex=e)
output_queue.put(run_info)
result = LineResult(
output={},
aggregation_inputs={},
run_info=run_info,
node_run_infos={},
)
return result
def _process_wrapper(
executor_creation_func,
input_queue: Queue,
output_queue: Queue,
log_context_initialization_func,
operation_contexts_dict: dict,
):
if threading.current_thread() is threading.main_thread():
signal.signal(signal.SIGINT, signal_handler)
else:
bulk_logger.info("Current thread is not main thread, skip signal handler registration in batch process pool.")
OperationContext.get_instance().update(operation_contexts_dict) # Update the operation context for the new process.
if log_context_initialization_func:
with log_context_initialization_func():
exec_line_for_queue(executor_creation_func, input_queue, output_queue)
else:
exec_line_for_queue(executor_creation_func, input_queue, output_queue)
def create_executor_fork(*, flow_executor: FlowExecutor, storage: AbstractRunStorage):
if isinstance(flow_executor, ScriptExecutor):
return ScriptExecutor(
flow_file=flow_executor._flow_file,
connections=flow_executor._connections,
working_dir=flow_executor._working_dir,
storage=storage,
)
else:
run_tracker = RunTracker(run_storage=storage)
return FlowExecutor(
flow=flow_executor._flow,
connections=flow_executor._connections,
run_tracker=run_tracker,
cache_manager=flow_executor._cache_manager,
loaded_tools=flow_executor._loaded_tools,
raise_ex=False,
line_timeout_sec=flow_executor._line_timeout_sec,
)
def exec_line_for_queue(executor_creation_func, input_queue: Queue, output_queue: Queue):
run_storage = QueueRunStorage(output_queue)
executor: FlowExecutor = executor_creation_func(storage=run_storage)
while True:
try:
inputs, line_number, run_id, line_timeout_sec = input_queue.get(timeout=1)
result = _exec_line(
executor=executor,
output_queue=output_queue,
inputs=inputs,
run_id=run_id,
index=line_number,
line_timeout_sec=line_timeout_sec,
)
output_queue.put(result)
except queue.Empty:
# Do nothing until the input_queue has content or the process is killed.
# TODO: Exit the process more gracefully.
pass
def get_available_max_worker_count():
pid = os.getpid()
mem_info = psutil.virtual_memory()
available_memory = mem_info.available / (1024 * 1024) # in MB
process = psutil.Process(pid)
process_memory_info = process.memory_info()
process_memory = process_memory_info.rss / (1024 * 1024) # in MB
estimated_available_worker_count = int(available_memory // process_memory)
if estimated_available_worker_count < 1:
# TODO: For the vector db case, optimize the execution logic:
# 1. Let the main process not consume memory, because it does not actually invoke anything.
# 2. When the degree of parallelism is 1, have the main process execute the task directly
# instead of creating a child process.
bulk_logger.warning(
f"Current system's available memory is {available_memory}MB, less than the memory "
f"{process_memory}MB required by the process. The maximum available worker count is 1."
)
estimated_available_worker_count = 1
else:
bulk_logger.info(
f"Current system's available memory is {available_memory}MB, "
f"memory consumption of current process is {process_memory}MB, "
f"estimated available worker count is {available_memory}/{process_memory} "
f"= {estimated_available_worker_count}"
)
return estimated_available_worker_count
| promptflow/src/promptflow/promptflow/executor/_line_execution_process_pool.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/executor/_line_execution_process_pool.py",
"repo_id": "promptflow",
"token_count": 14805
} | 23 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import asyncio
import contextlib
import copy
import functools
import inspect
import os
import uuid
from pathlib import Path
from threading import current_thread
from types import GeneratorType
from typing import Any, Callable, Dict, List, Mapping, Optional, Tuple
from opentelemetry.trace.status import StatusCode
from promptflow._constants import LINE_NUMBER_KEY
from promptflow._core._errors import NotSupported, UnexpectedError
from promptflow._core.cache_manager import AbstractCacheManager
from promptflow._core.flow_execution_context import FlowExecutionContext
from promptflow._core.metric_logger import add_metric_logger, remove_metric_logger
from promptflow._core.openai_injector import inject_openai_api
from promptflow._core.operation_context import OperationContext
from promptflow._core.run_tracker import RunTracker
from promptflow._core.tool import STREAMING_OPTION_PARAMETER_ATTR
from promptflow._core.tools_manager import ToolsManager
from promptflow._core.tracer import (
enrich_span_with_context,
enrich_span_with_input,
enrich_span_with_openai_tokens,
enrich_span_with_output,
open_telemetry_tracer,
)
from promptflow._utils.context_utils import _change_working_dir
from promptflow._utils.execution_utils import (
apply_default_value_for_input,
collect_lines,
get_aggregation_inputs_properties,
)
from promptflow._utils.logger_utils import flow_logger, logger
from promptflow._utils.multimedia_utils import (
load_multimedia_data,
load_multimedia_data_recursively,
persist_multimedia_data,
)
from promptflow._utils.utils import get_int_env_var, transpose
from promptflow._utils.yaml_utils import load_yaml
from promptflow.contracts.flow import Flow, FlowInputDefinition, InputAssignment, InputValueType, Node
from promptflow.contracts.run_info import FlowRunInfo, Status
from promptflow.contracts.run_mode import RunMode
from promptflow.contracts.trace import TraceType
from promptflow.exceptions import PromptflowException
from promptflow.executor import _input_assignment_parser
from promptflow.executor._async_nodes_scheduler import AsyncNodesScheduler
from promptflow.executor._errors import NodeOutputNotFound, OutputReferenceNotExist, SingleNodeValidationError
from promptflow.executor._flow_nodes_scheduler import (
DEFAULT_CONCURRENCY_BULK,
DEFAULT_CONCURRENCY_FLOW,
FlowNodesScheduler,
)
from promptflow.executor._result import AggregationResult, LineResult
from promptflow.executor._tool_resolver import ToolResolver
from promptflow.executor.flow_validator import FlowValidator
from promptflow.storage import AbstractRunStorage
from promptflow.storage._run_storage import DefaultRunStorage
class FlowExecutor:
"""This class is used to execute a single flow for different inputs.
:param flow: The flow to be executed.
:type flow: ~promptflow.contracts.flow.Flow
:param connections: The connections to be used for the flow.
:type connections: dict
:param run_tracker: The run tracker to be used for the flow.
:type run_tracker: ~promptflow._core.run_tracker.RunTracker
:param cache_manager: The cache manager to be used for the flow.
:type cache_manager: ~promptflow._core.cache_manager.AbstractCacheManager
:param loaded_tools: The loaded tools to be used for the flow.
:type loaded_tools: Mapping[str, Callable]
:param raise_ex: Whether to raise exceptions or not. Default is False.
:type raise_ex: Optional[bool]
:param working_dir: The working directory to be used for the flow. Default is None.
:type working_dir: Optional[str]
:param line_timeout_sec: The line timeout in seconds to be used for the flow. Default is LINE_TIMEOUT_SEC.
:type line_timeout_sec: Optional[int]
:param flow_file: The flow file to be used for the flow. Default is None.
:type flow_file: Optional[Path]
"""
def __init__(
self,
flow: Flow,
connections: dict,
run_tracker: RunTracker,
cache_manager: AbstractCacheManager,
loaded_tools: Mapping[str, Callable],
*,
raise_ex: bool = False,
working_dir=None,
line_timeout_sec=None,
flow_file=None,
):
"""Initialize a FlowExecutor object.
:param flow: The Flow object to execute.
:type flow: ~promptflow.contracts.flow.Flow
:param connections: The connections between nodes in the Flow.
:type connections: dict
:param run_tracker: The RunTracker object to track the execution of the Flow.
:type run_tracker: ~promptflow._core.run_tracker.RunTracker
:param cache_manager: The AbstractCacheManager object to manage caching of results.
:type cache_manager: ~promptflow._core.cache_manager.AbstractCacheManager
:param loaded_tools: A mapping of tool names to their corresponding functions.
:type loaded_tools: Mapping[str, Callable]
:param raise_ex: Whether to raise an exception if an error occurs during execution.
:type raise_ex: bool
:param working_dir: The working directory to use for execution.
:type working_dir: str or None
:param line_timeout_sec: The maximum time to wait for a line of output from a node.
:type line_timeout_sec: int or None
:param flow_file: The path to the file containing the Flow definition.
:type flow_file: str or None
"""
# Inject OpenAI API to make sure traces and headers injection works and
# update OpenAI API configs from environment variables.
inject_openai_api()
self._flow = flow
self._flow_id = flow.id or str(uuid.uuid4())
self._connections = connections
self._aggregation_inputs_references = get_aggregation_inputs_properties(flow)
self._aggregation_nodes = {node.name for node in self._flow.nodes if node.aggregation}
self._run_tracker = run_tracker
self._cache_manager = cache_manager
self._loaded_tools = loaded_tools
self._working_dir = working_dir
self._line_timeout_sec = line_timeout_sec or get_int_env_var("PF_LINE_TIMEOUT_SEC")
self._flow_file = flow_file
try:
self._tools_manager = ToolsManager(loaded_tools)
tool_to_meta = {tool.name: tool for tool in flow.tools}
custom_tools = {
node.name: self._tools_manager._load_custom_tool(tool_to_meta[node.tool], node.name)
for node in flow.nodes
if not self._tools_manager.loaded(node.name)
}
self._tools_manager.load_tools(custom_tools)
except PromptflowException as e:
# For PromptflowException, we don't wrap it, because we need to generate the ErrorResponse from the inner exception.
# Will try to find one common way to handle this case.
raise e
except Exception as e:
raise ValueError(f"Failed to load custom tools for flow due to exception:\n {e}.") from e
for node in flow.nodes:
self._tools_manager.assert_loaded(node.name)
self._raise_ex = raise_ex
self._log_interval = 60
self._processing_idx = None
self._completed_idx = None
# TODO: Improve the experience about configuring node concurrency.
self._node_concurrency = DEFAULT_CONCURRENCY_BULK
@classmethod
def create(
cls,
flow_file: Path,
connections: dict,
working_dir: Optional[Path] = None,
*,
entry: Optional[str] = None,
storage: Optional[AbstractRunStorage] = None,
raise_ex: bool = True,
node_override: Optional[Dict[str, Dict[str, Any]]] = None,
line_timeout_sec: Optional[int] = None,
) -> "FlowExecutor":
"""Create a new instance of FlowExecutor.
:param flow_file: The path to the flow file.
:type flow_file: Path
:param connections: The connections to be used for the flow.
:type connections: dict
:param working_dir: The working directory to be used for the flow. Default is None.
:type working_dir: Optional[str]
:param entry: The entry function to be used for the flow if a .py file is provided. Default is None.
:type entry: Optional[str]
:param storage: The storage to be used for the flow. Default is None.
:type storage: Optional[~promptflow.storage.AbstractRunStorage]
:param raise_ex: Whether to raise exceptions or not. Default is True.
:type raise_ex: Optional[bool]
:param node_override: The node overrides to be used for the flow. Default is None.
:type node_override: Optional[Dict[str, Dict[str, Any]]]
:param line_timeout_sec: The line timeout in seconds to be used for the flow. Default is LINE_TIMEOUT_SEC.
:type line_timeout_sec: Optional[int]
:return: A new instance of FlowExecutor.
:rtype: ~promptflow.executor.flow_executor.FlowExecutor
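Example (a minimal sketch; the flow file path, connections, and input names are illustrative):

.. code-block:: python

    from pathlib import Path

    executor = FlowExecutor.create(Path("flow.dag.yaml"), connections={})
    line_result = executor.exec_line({"question": "What is ChatGPT?"})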
"""
if cls._is_eager_flow_yaml(flow_file, working_dir):
from ._script_executor import ScriptExecutor
return ScriptExecutor(
flow_file=Path(flow_file),
working_dir=working_dir,
storage=storage,
)
else:
flow = Flow.from_yaml(flow_file, working_dir=working_dir)
return cls._create_from_flow(
flow_file=flow_file,
flow=flow,
connections=connections,
working_dir=working_dir,
storage=storage,
raise_ex=raise_ex,
node_override=node_override,
line_timeout_sec=line_timeout_sec,
)
@classmethod
def _create_from_flow(
cls,
flow: Flow,
connections: dict,
working_dir: Optional[Path],
*,
flow_file: Optional[Path] = None,
storage: Optional[AbstractRunStorage] = None,
raise_ex: bool = True,
node_override: Optional[Dict[str, Dict[str, Any]]] = None,
line_timeout_sec: Optional[int] = None,
):
logger.debug("Start initializing the flow executor.")
working_dir = Flow._resolve_working_dir(flow_file, working_dir)
if node_override:
flow = flow._apply_node_overrides(node_override)
flow = flow._apply_default_node_variants()
package_tool_keys = [node.source.tool for node in flow.nodes if node.source and node.source.tool]
tool_resolver = ToolResolver(working_dir, connections, package_tool_keys)
with _change_working_dir(working_dir):
resolved_tools = [tool_resolver.resolve_tool_by_node(node) for node in flow.nodes]
flow = Flow(
flow.id, flow.name, [r.node for r in resolved_tools], inputs=flow.inputs, outputs=flow.outputs, tools=[]
)
# ensure_flow_valid includes validation + resolve
# Todo: 1) split pure validation + resolve from below method 2) provide completed validation()
flow = FlowValidator._validate_nodes_topology(flow)
flow.outputs = FlowValidator._ensure_outputs_valid(flow)
if storage is None:
storage = DefaultRunStorage()
run_tracker = RunTracker(storage)
cache_manager = AbstractCacheManager.init_from_env()
executor = FlowExecutor(
flow=flow,
connections=connections,
run_tracker=run_tracker,
cache_manager=cache_manager,
loaded_tools={r.node.name: r.callable for r in resolved_tools},
raise_ex=raise_ex,
working_dir=working_dir,
line_timeout_sec=line_timeout_sec,
flow_file=flow_file,
)
logger.debug("The flow executor is initialized successfully.")
return executor
@classmethod
def _is_eager_flow_yaml(cls, flow_file: Path, working_dir: Optional[Path] = None):
if Path(flow_file).suffix.lower() in [".yaml", ".yml"]:
flow_file = working_dir / flow_file if working_dir else flow_file
with open(flow_file, "r", encoding="utf-8") as fin:
flow_dag = load_yaml(fin)
if "entry" in flow_dag:
return True
return False
@classmethod
def load_and_exec_node(
cls,
flow_file: Path,
node_name: str,
*,
storage: AbstractRunStorage = None,
output_sub_dir: Optional[str] = None,
flow_inputs: Optional[Mapping[str, Any]] = None,
dependency_nodes_outputs: Optional[Mapping[str, Any]] = None,
connections: Optional[dict] = None,
working_dir: Optional[Path] = None,
raise_ex: bool = False,
):
"""Load and execute a single node from the flow.
:param flow_file: The path to the flow file.
:type flow_file: Path
:param node_name: The name of the node to be executed.
:type node_name: str
:param storage: The storage to be used for the flow.
:type storage: Optional[~promptflow.storage.AbstractRunStorage]
:param output_sub_dir: The directory to persist images for the flow. Kept only for backward compatibility.
:type output_sub_dir: Optional[str]
:param flow_inputs: The inputs to be used for the flow. Default is None.
:type flow_inputs: Optional[Mapping[str, Any]]
:param dependency_nodes_outputs: The outputs of the dependency nodes. Default is None.
:type dependency_nodes_outputs: Optional[Mapping[str, Any]]
:param connections: The connections to be used for the flow. Default is None.
:type connections: Optional[dict]
:param working_dir: The working directory to be used for the flow. Default is None.
:type working_dir: Optional[str]
:param raise_ex: Whether to raise exceptions or not. Default is False.
:type raise_ex: Optional[bool]
"""
# Inject OpenAI API to make sure traces and headers injection works and
# update OpenAI API configs from environment variables.
inject_openai_api()
OperationContext.get_instance().run_mode = RunMode.SingleNode.name
dependency_nodes_outputs = dependency_nodes_outputs or {}
# Load the node from the flow file
working_dir = Flow._resolve_working_dir(flow_file, working_dir)
with open(working_dir / flow_file, "r") as fin:
flow = Flow.deserialize(load_yaml(fin))
node = flow.get_node(node_name)
if node is None:
raise SingleNodeValidationError(
message_format=(
"Validation failed when attempting to execute the node. "
"Node '{node_name}' is not found in flow '{flow_file}'. "
"Please change node name or correct the flow file."
),
node_name=node_name,
flow_file=flow_file,
)
if not node.source or not node.type:
raise SingleNodeValidationError(
message_format=(
"Validation failed when attempting to execute the node. "
"Properties 'source' or 'type' are not specified for Node '{node_name}' in flow '{flow_file}'. "
"Please make sure these properties are in place and try again."
),
node_name=node_name,
flow_file=flow_file,
)
# Only load the node's referenced flow inputs
node_referenced_flow_inputs = FlowExecutor._get_node_referenced_flow_inputs(node, flow.inputs)
inputs_with_default_value = apply_default_value_for_input(node_referenced_flow_inputs, flow_inputs)
converted_flow_inputs_for_node = FlowValidator.convert_flow_inputs_for_node(
flow, node, inputs_with_default_value
)
inputs = load_multimedia_data(node_referenced_flow_inputs, converted_flow_inputs_for_node)
dependency_nodes_outputs = load_multimedia_data_recursively(dependency_nodes_outputs)
package_tool_keys = [node.source.tool] if node.source and node.source.tool else []
tool_resolver = ToolResolver(working_dir, connections, package_tool_keys)
resolved_node = tool_resolver.resolve_tool_by_node(node)
# Prepare callable and real inputs here
resolved_inputs = {}
for k, v in resolved_node.node.inputs.items():
value = _input_assignment_parser.parse_value(v, dependency_nodes_outputs, inputs)
resolved_inputs[k] = value
if resolved_node.node.aggregation:
# For aggregation node, we need to convert value to list.
if (
v.value_type == InputValueType.FLOW_INPUT
or v.value_type == InputValueType.NODE_REFERENCE
and flow.is_normal_node(v.value)
):
resolved_inputs[k] = [value]
# Note that the init args are only used when resolving the tool,
# so we need to remove them from the inputs before invoking.
resolved_inputs = {k: v for k, v in resolved_inputs.items() if k not in resolved_node.init_args}
if storage is None:
sub_dir = "." if output_sub_dir is None else output_sub_dir
storage = DefaultRunStorage(base_dir=working_dir, sub_dir=Path(sub_dir))
run_tracker = RunTracker(storage)
with run_tracker.node_log_manager:
# Will generate node run in context
context = FlowExecutionContext(
name=flow.name,
run_tracker=run_tracker,
cache_manager=AbstractCacheManager.init_from_env(),
)
try:
if inspect.iscoroutinefunction(resolved_node.callable):
asyncio.run(
context.invoke_tool_async(resolved_node.node, resolved_node.callable, kwargs=resolved_inputs),
)
else:
context.invoke_tool(resolved_node.node, resolved_node.callable, kwargs=resolved_inputs)
except Exception:
if raise_ex: # Only raise exception when raise_ex is True
raise
node_runs = run_tracker.collect_node_runs()
if len(node_runs) != 1:
# Should not happen except there is bug in run_tracker or thread control.
raise UnexpectedError(
message_format=(
"Single node execution failed. Expected one node result, "
"but received {node_result_num}. Please contact support for further assistance."
),
node_result_num=len(node_runs),
)
return node_runs[0]
@staticmethod
def update_environment_variables_with_connections(connections: dict):
"""Update environment variables with connections.
:param connections: A dictionary containing connection information.
:type connections: dict
:return: A dictionary containing updated environment variables.
:rtype: dict
"""
from promptflow._sdk._utils import update_environment_variables_with_connections
return update_environment_variables_with_connections(connections)
def convert_flow_input_types(self, inputs: dict) -> Mapping[str, Any]:
"""Convert the input types of the given inputs dictionary to match the expected types of the flow.
:param inputs: A dictionary containing the inputs to the flow.
:type inputs: dict
:return: A dictionary containing the converted inputs.
:rtype: Mapping[str, Any]
"""
return FlowValidator.resolve_flow_inputs_type(self._flow, inputs)
@property
def _default_inputs_mapping(self):
return {key: f"${{data.{key}}}" for key in self._flow.inputs}
@property
def has_aggregation_node(self) -> bool:
"""Check if the flow executor has any aggregation nodes.
:return: True if the flow executor has at least one aggregation node, False otherwise.
:rtype: bool
"""
return len(self._aggregation_nodes) > 0
@property
def aggregation_nodes(self):
"""Get the aggregation nodes of the flow executor.
:return: A list of aggregation nodes.
:rtype: list
"""
return self._aggregation_nodes
def _fill_lines(self, indexes, values, nlines):
"""Fill the values into the result list according to the indexes."""
result = [None] * nlines
for idx, value in zip(indexes, values):
result[idx] = value
return result
def _exec_aggregation_with_bulk_results(
self,
batch_inputs: List[dict],
results: List[LineResult],
run_id=None,
) -> AggregationResult:
if not self.aggregation_nodes:
return AggregationResult({}, {}, {})
logger.info("Executing aggregation nodes...")
run_infos = [r.run_info for r in results]
succeeded = [i for i, r in enumerate(run_infos) if r.status == Status.Completed]
succeeded_batch_inputs = [batch_inputs[i] for i in succeeded]
resolved_succeeded_batch_inputs = [
FlowValidator.ensure_flow_inputs_type(flow=self._flow, inputs=input) for input in succeeded_batch_inputs
]
succeeded_inputs = transpose(resolved_succeeded_batch_inputs, keys=list(self._flow.inputs.keys()))
aggregation_inputs = transpose(
[result.aggregation_inputs for result in results],
keys=self._aggregation_inputs_references,
)
succeeded_aggregation_inputs = collect_lines(succeeded, aggregation_inputs)
try:
aggr_results = self._exec_aggregation(succeeded_inputs, succeeded_aggregation_inputs, run_id)
logger.info("Finish executing aggregation nodes.")
return aggr_results
except PromptflowException as e:
# For PromptflowException, we already do classification, so raise it directly.
raise e
except Exception as e:
error_type_and_message = f"({e.__class__.__name__}) {e}"
raise UnexpectedError(
message_format=(
"Unexpected error occurred while executing the aggregated nodes. "
"Please fix or contact support for assistance. The error details: {error_type_and_message}."
),
error_type_and_message=error_type_and_message,
) from e
@staticmethod
def _try_get_aggregation_input(val: InputAssignment, aggregation_inputs: dict):
if val.value_type != InputValueType.NODE_REFERENCE:
return val
serialized_val = val.serialize()
if serialized_val not in aggregation_inputs:
return val
return InputAssignment(value=aggregation_inputs[serialized_val])
def get_status_summary(self, run_id: str):
"""Get a summary of the status of a given run.
:param run_id: The ID of the run to get the status summary for.
:type run_id: str
:return: A summary of the status of the given run.
:rtype: str
"""
return self._run_tracker.get_status_summary(run_id)
def exec_aggregation(
self,
inputs: Mapping[str, Any],
aggregation_inputs: Mapping[str, Any],
run_id=None,
node_concurrency=DEFAULT_CONCURRENCY_FLOW,
) -> AggregationResult:
"""Execute the aggregation node of the flow.
:param inputs: A mapping of input names to their values.
:type inputs: Mapping[str, Any]
:param aggregation_inputs: A mapping of aggregation input names to their values.
:type aggregation_inputs: Mapping[str, Any]
:param run_id: The ID of the current run, if any.
:type run_id: Optional[str]
:param node_concurrency: The maximum number of nodes that can be executed concurrently.
:type node_concurrency: int
:return: The result of the aggregation node.
:rtype: ~promptflow.executor._result.AggregationResult
:raises: FlowError if the inputs or aggregation_inputs are invalid.
"""
self._node_concurrency = node_concurrency
aggregated_flow_inputs = dict(inputs or {})
aggregation_inputs = dict(aggregation_inputs or {})
FlowValidator._validate_aggregation_inputs(aggregated_flow_inputs, aggregation_inputs)
aggregated_flow_inputs = self._apply_default_value_for_aggregation_input(
self._flow.inputs, aggregated_flow_inputs, aggregation_inputs
)
# Resolve aggregated_flow_inputs from list of strings to list of objects, whose type is specified in yaml file.
# TODO: For now, we resolve type for batch run's aggregation input in _exec_aggregation_with_bulk_results.
# If we decide to merge the resolve logic into one place, remember to take care of index for batch run.
resolved_aggregated_flow_inputs = FlowValidator.resolve_aggregated_flow_inputs_type(
self._flow, aggregated_flow_inputs
)
with self._run_tracker.node_log_manager:
return self._exec_aggregation(resolved_aggregated_flow_inputs, aggregation_inputs, run_id)
@staticmethod
def _apply_default_value_for_aggregation_input(
inputs: Dict[str, FlowInputDefinition],
aggregated_flow_inputs: Mapping[str, Any],
aggregation_inputs: Mapping[str, Any],
):
aggregation_lines = 1
if aggregated_flow_inputs.values():
one_input_value = list(aggregated_flow_inputs.values())[0]
aggregation_lines = len(one_input_value)
# If aggregated_flow_inputs is empty, we should use aggregation_inputs to get the length.
elif aggregation_inputs.values():
one_input_value = list(aggregation_inputs.values())[0]
aggregation_lines = len(one_input_value)
for key, value in inputs.items():
if key not in aggregated_flow_inputs and (value and value.default is not None):
aggregated_flow_inputs[key] = [value.default] * aggregation_lines
return aggregated_flow_inputs
def _exec_aggregation(
self,
inputs: Mapping[str, Any],
aggregation_inputs: Mapping[str, Any],
run_id=None,
) -> AggregationResult:
if not self._flow.has_aggregation_node:
return AggregationResult({}, {}, {})
run_id = run_id or str(uuid.uuid4())
nodes = [copy.deepcopy(node) for node in self._flow.nodes if node.aggregation]
# Update the inputs of the aggregation nodes with the aggregation inputs.
for node in nodes:
node.inputs = {
k: FlowExecutor._try_get_aggregation_input(v, aggregation_inputs) for k, v in node.inputs.items()
}
# Load multimedia data for the flow inputs of aggregation nodes.
inputs = load_multimedia_data(self._flow.inputs, inputs)
# TODO: Use a new run tracker to avoid memory increase infinitely.
run_tracker = self._run_tracker
context = FlowExecutionContext(
name=self._flow.name,
run_tracker=run_tracker,
cache_manager=self._cache_manager,
run_id=run_id,
flow_id=self._flow_id,
)
metrics = {}
def _log_metric(key, value):
metrics[key] = value
add_metric_logger(_log_metric)
try:
self._submit_to_scheduler(context, inputs, nodes)
node_run_infos = run_tracker.collect_child_node_runs(run_id)
# Output is set as an empty dict, because the aggregation outputs story is not finalized.
return AggregationResult({}, metrics, {run.node: run for run in node_run_infos})
except Exception:
if self._raise_ex:
raise
node_run_infos = run_tracker.collect_child_node_runs(run_id)
return AggregationResult({}, metrics, {run.node: run for run in node_run_infos})
finally:
remove_metric_logger(_log_metric)
def exec(self, inputs: dict, node_concurrency=DEFAULT_CONCURRENCY_FLOW) -> dict:
"""Executes the flow with the given inputs and returns the output.
:param inputs: A dictionary containing the input values for the flow.
:type inputs: dict
:param node_concurrency: The maximum number of nodes that can be executed concurrently.
:type node_concurrency: int
:return: A dictionary containing the output values of the flow.
:rtype: dict
"""
self._node_concurrency = node_concurrency
inputs = apply_default_value_for_input(self._flow.inputs, inputs)
result = self._exec_with_trace(inputs)
# TODO: remove this line once serving directly calling self.exec_line
self._add_line_results([result])
return result.output or {}
def _exec_in_thread(self, args) -> LineResult:
inputs, run_id, line_number, variant_id, validate_inputs = args
thread_name = current_thread().name
self._processing_idx[line_number] = thread_name
self._run_tracker._activate_in_context()
results = self._exec_with_trace(
inputs, run_id=run_id, line_number=line_number, variant_id=variant_id, validate_inputs=validate_inputs
)
self._run_tracker._deactivate_in_context()
self._processing_idx.pop(line_number)
self._completed_idx[line_number] = thread_name
return results
def _extract_aggregation_inputs(self, nodes_outputs: dict):
return {
prop: self._extract_aggregation_input(nodes_outputs, prop) for prop in self._aggregation_inputs_references
}
def _extract_aggregation_input(self, nodes_outputs: dict, aggregation_input_property: str):
assign = InputAssignment.deserialize(aggregation_input_property)
return _input_assignment_parser.parse_value(assign, nodes_outputs, {})
def exec_line(
self,
inputs: Mapping[str, Any],
index: Optional[int] = None,
run_id: Optional[str] = None,
variant_id: str = "",
validate_inputs: bool = True,
node_concurrency=DEFAULT_CONCURRENCY_FLOW,
allow_generator_output: bool = False,
line_timeout_sec: Optional[int] = None,
) -> LineResult:
"""Execute a single line of the flow.
:param inputs: The input values for the line.
:type inputs: Mapping[str, Any]
:param index: The index of the line to execute.
:type index: Optional[int]
:param run_id: The ID of the flow run.
:type run_id: Optional[str]
:param variant_id: The ID of the variant to execute.
:type variant_id: str
:param validate_inputs: Whether to validate the input values.
:type validate_inputs: bool
:param node_concurrency: The maximum number of nodes that can be executed concurrently.
:type node_concurrency: int
:param allow_generator_output: Whether to allow generator output.
:type allow_generator_output: bool
:param line_timeout_sec: The maximum time to wait for a line of output.
:type line_timeout_sec: Optional[int]
:return: The result of executing the line.
:rtype: ~promptflow.executor._result.LineResult
"""
self._node_concurrency = node_concurrency
# TODO: Pass line_timeout_sec to flow node scheduler instead of updating self._line_timeout_sec
self._line_timeout_sec = line_timeout_sec or self._line_timeout_sec
inputs = apply_default_value_for_input(self._flow.inputs, inputs)
# For flow run, validate inputs as default
with self._run_tracker.node_log_manager:
# exec_line interface may be called when executing a batch run, so we only set run_mode as flow run when
# it is not set.
run_id = run_id or str(uuid.uuid4())
with self._update_operation_context(run_id, index):
line_result = self._exec_with_trace(
inputs,
run_id=run_id,
line_number=index,
variant_id=variant_id,
validate_inputs=validate_inputs,
allow_generator_output=allow_generator_output,
)
# Return line result with index
if index is not None and isinstance(line_result.output, dict):
line_result.output[LINE_NUMBER_KEY] = index
return line_result
@contextlib.contextmanager
def _update_operation_context(self, run_id: str, line_number: int):
operation_context = OperationContext.get_instance()
original_mode = operation_context.get("run_mode", None)
values_for_context = {"flow_id": self._flow_id, "root_run_id": run_id}
if original_mode == RunMode.Batch.name:
values_for_otel = {
"batch_run_id": run_id,
"line_number": line_number,
}
else:
values_for_otel = {"line_run_id": run_id}
try:
operation_context.run_mode = original_mode or RunMode.Test.name
operation_context.update(values_for_context)
for k, v in values_for_otel.items():
operation_context._add_otel_attributes(k, v)
yield
finally:
for k in values_for_context:
operation_context.pop(k)
operation_context._remove_otel_attributes(values_for_otel.keys())
if original_mode is None:
operation_context.pop("run_mode")
else:
operation_context.run_mode = original_mode
def _add_line_results(self, line_results: List[LineResult], run_tracker: Optional[RunTracker] = None):
run_tracker = run_tracker or self._run_tracker
run_tracker._flow_runs.update({result.run_info.run_id: result.run_info for result in line_results})
run_tracker._node_runs.update(
{
node_run_info.run_id: node_run_info
for result in line_results
for node_run_info in result.node_run_infos.values()
}
)
@staticmethod
def _get_node_referenced_flow_inputs(
node, flow_inputs: Dict[str, FlowInputDefinition]
) -> Dict[str, FlowInputDefinition]:
node_referenced_flow_inputs = {}
for _, value in node.inputs.items():
# Only add a flow input to node_referenced_flow_inputs when it exists and is referenced by the node.
# If the flow input does not exist, an exception is raised in FlowValidator.convert_flow_inputs_for_node.
if value.value_type == InputValueType.FLOW_INPUT and value.value in flow_inputs:
node_referenced_flow_inputs[value.value] = flow_inputs[value.value]
return node_referenced_flow_inputs
def _exec_with_trace(
self,
inputs: Mapping[str, Any],
run_id: Optional[str] = None,
line_number: Optional[int] = None,
variant_id: str = "",
validate_inputs: bool = False,
allow_generator_output: bool = False,
) -> LineResult:
"""execute line run with trace
This method is similar to `_exec`, but it also includes tracing functionality.
It starts a new span, enriches it with input and output data, and sets the span status.
Args:
inputs (Mapping): flow inputs
run_id: the id to identify the flow run
line_number: line number for batch inputs
variant_id: variant id for the line run
validate_inputs:
Flag to indicate if input validation is needed. It is used along with "_raise_ex" to
define whether an exception shall be raised if inputs validation (type check, etc.) failed.
The flag defaults to True for flow runs and False for bulk runs.
allow_generator_output:
Flag to indicate if generator output is allowed.
Returns:
LineResult: Line run result
"""
with open_telemetry_tracer.start_as_current_span(self._flow.name) as span:
# initialize span
span.set_attributes(
{
"framework": "promptflow",
"span_type": TraceType.FLOW.value,
}
)
enrich_span_with_context(span)
# enrich span with input
enrich_span_with_input(span, inputs)
# invoke
result = self._exec(
inputs,
run_id=run_id,
line_number=line_number,
variant_id=variant_id,
validate_inputs=validate_inputs,
allow_generator_output=allow_generator_output,
)
# extract output from result
output = result.output
# enrich span with output
enrich_span_with_output(span, output)
enrich_span_with_openai_tokens(span, trace_type=TraceType.FLOW)
# set status
span.set_status(StatusCode.OK)
return result
def _exec(
self,
inputs: Mapping[str, Any],
run_id: Optional[str] = None,
line_number: Optional[int] = None,
variant_id: str = "",
validate_inputs: bool = False,
allow_generator_output: bool = False,
) -> LineResult:
"""execute line run
Args:
inputs (Mapping): flow inputs
run_id: the id to identify the flow run
line_number: line number for batch inputs
validate_inputs:
Flag to indicate if input validation is needed. It is used along with "_raise_ex" to
define whether an exception shall be raised if inputs validation (type check, etc.) failed.
The flag defaults to True for flow runs and False for bulk runs.
allow_generator_output:
Flag to indicate if generator output is allowed.
Returns:
LineResult: Line run result
"""
line_run_id = run_id if line_number is None else f"{run_id}_{line_number}"
run_tracker = RunTracker(
self._run_tracker._storage, self._run_tracker._run_mode, self._run_tracker.node_log_manager
)
# We need to copy the allow_generator_types from the original run_tracker.
run_tracker.allow_generator_types = self._run_tracker.allow_generator_types
run_info: FlowRunInfo = run_tracker.start_flow_run(
flow_id=self._flow_id,
root_run_id=run_id,
run_id=line_run_id,
parent_run_id=run_id,
index=line_number,
variant_id=variant_id,
)
context = FlowExecutionContext(
name=self._flow.name,
run_tracker=run_tracker,
cache_manager=self._cache_manager,
run_id=run_id,
flow_id=self._flow_id,
line_number=line_number,
variant_id=variant_id,
)
output = {}
aggregation_inputs = {}
try:
if validate_inputs:
inputs = FlowValidator.ensure_flow_inputs_type(flow=self._flow, inputs=inputs, idx=line_number)
inputs = load_multimedia_data(self._flow.inputs, inputs)
# Inputs are assigned after validation and multimedia data loading, instead of at the start of the flow run.
# This way, if validation or multimedia data loading fails, we avoid persisting invalid inputs.
run_info.inputs = inputs
output, nodes_outputs = self._traverse_nodes(inputs, context)
output = self._stringify_generator_output(output) if not allow_generator_output else output
# Persist the node runs for the nodes that have a generator output
generator_output_nodes = [
nodename for nodename, output in nodes_outputs.items() if isinstance(output, GeneratorType)
]
run_tracker.persist_selected_node_runs(run_info, generator_output_nodes)
run_tracker.allow_generator_types = allow_generator_output
run_tracker.end_run(line_run_id, result=output)
aggregation_inputs = self._extract_aggregation_inputs(nodes_outputs)
except KeyboardInterrupt as ex:
# Run will be cancelled when the process receives a SIGINT signal.
# KeyboardInterrupt will be raised after asyncio finishes its signal handling
# End run with the KeyboardInterrupt exception, so that its status will be Canceled
flow_logger.info("Received KeyboardInterrupt, cancel the run.")
run_tracker.end_run(line_run_id, ex=ex)
raise
except Exception as e:
run_tracker.end_run(line_run_id, ex=e)
if self._raise_ex:
raise
finally:
run_tracker._update_flow_run_info_with_node_runs(run_info)
run_tracker.persist_flow_run(run_info)
node_run_infos = run_tracker.collect_child_node_runs(line_run_id)
node_runs = {node_run.node: node_run for node_run in node_run_infos}
return LineResult(output, aggregation_inputs, run_info, node_runs)
def _extract_outputs(self, nodes_outputs, bypassed_nodes, flow_inputs):
outputs = {}
for name, output in self._flow.outputs.items():
if output.reference.value_type == InputValueType.LITERAL:
outputs[name] = output.reference.value
continue
if output.reference.value_type == InputValueType.FLOW_INPUT:
outputs[name] = flow_inputs[output.reference.value]
continue
if output.reference.value_type != InputValueType.NODE_REFERENCE:
raise NotSupported(
message_format=(
"The output type '{output_type}' is currently unsupported. "
"Please choose from available types: '{supported_output_type}' and try again."
),
output_type=output.reference.value_type.value
if hasattr(output.reference.value_type, "value")
else output.reference.value_type,
supported_output_type=[output_type.value for output_type in InputValueType],
)
node = next((n for n in self._flow.nodes if n.name == output.reference.value), None)
if not node:
raise OutputReferenceNotExist(
message_format=(
"The output '{output_name}' for flow is incorrect. The node '{node_name}' "
"referenced by the output '{output_name}' can not found in flow. "
"Please rectify the error in your flow and try again."
),
node_name=output.reference.value,
output_name=name,
)
if node.aggregation:
# Note that the reduce node referenced in the output is not supported.
continue
if node.name not in nodes_outputs:
raise NodeOutputNotFound(
message_format=(
"The output '{output_name}' for flow is incorrect. "
"No outputs found for node '{node_name}'. Please review the problematic "
"output and rectify the error."
),
output_name=name,
node_name=node.name,
)
if output.reference.value in bypassed_nodes:
flow_logger.warning(
f"The node referenced by output:'{output.reference.value}' is bypassed, which is not recommended."
)
node_result = nodes_outputs[output.reference.value]
outputs[name] = _input_assignment_parser.parse_node_property(
output.reference.value, node_result, output.reference.property
)
return outputs
def _should_use_async(self):
return (
all(inspect.iscoroutinefunction(f) for f in self._tools_manager._tools.values())
or os.environ.get("PF_USE_ASYNC", "false").lower() == "true"
)
def _traverse_nodes(self, inputs, context: FlowExecutionContext) -> Tuple[dict, dict]:
batch_nodes = [node for node in self._flow.nodes if not node.aggregation]
outputs = {}
# TODO: Use a mixed scheduler to support both async and thread pool mode.
if self._should_use_async():
flow_logger.info("Start executing nodes in async mode.")
scheduler = AsyncNodesScheduler(self._tools_manager, self._node_concurrency)
nodes_outputs, bypassed_nodes = asyncio.run(scheduler.execute(batch_nodes, inputs, context))
else:
flow_logger.info("Start executing nodes in thread pool mode.")
nodes_outputs, bypassed_nodes = self._submit_to_scheduler(context, inputs, batch_nodes)
outputs = self._extract_outputs(nodes_outputs, bypassed_nodes, inputs)
return outputs, nodes_outputs
def _stringify_generator_output(self, outputs: dict):
for k, v in outputs.items():
if isinstance(v, GeneratorType):
outputs[k] = "".join(str(chuck) for chuck in v)
return outputs
def _submit_to_scheduler(self, context: FlowExecutionContext, inputs, nodes: List[Node]) -> Tuple[dict, dict]:
if not isinstance(self._node_concurrency, int):
raise UnexpectedError(
message_format=(
"Flow execution failed. To proceed, ensure that a valid node concurrency value is set. "
"The current value is {current_value}. Please contact support for further assistance."
),
current_value=self._node_concurrency,
)
return FlowNodesScheduler(
self._tools_manager,
inputs,
nodes,
self._node_concurrency,
context,
).execute(self._line_timeout_sec)
@staticmethod
def apply_inputs_mapping(
inputs: Mapping[str, Mapping[str, Any]],
inputs_mapping: Mapping[str, str],
) -> Dict[str, Any]:
# TODO: This function will be removed after the batch engine refactoring is completed.
from promptflow._utils.inputs_mapping_utils import apply_inputs_mapping
return apply_inputs_mapping(inputs, inputs_mapping)
def enable_streaming_for_llm_flow(self, stream_required: Callable[[], bool]):
"""Enable the LLM node that is connected to output to return streaming results controlled by `stream_required`.
If the stream_required callback returns True, the LLM node will return a generator of strings.
Otherwise, the LLM node will return a string.
:param stream_required: A callback that takes no arguments and returns a boolean value indicating whether \
streaming results should be enabled for the LLM node.
:type stream_required: Callable[[], bool]
:return: None
"""
for node in self._flow.nodes:
streaming_option_parameter = self._parse_streaming_option_parameter(node)
if (
streaming_option_parameter is not None
and self._flow.is_referenced_by_flow_output(node)
and not self._flow.is_referenced_by_other_node(node)
):
wrapper = _inject_stream_options(stream_required, streaming_option_parameter)
self._tools_manager.wrap_tool(node.name, wrapper=wrapper)
def _parse_streaming_option_parameter(self, node: Node) -> Optional[str]:
if self._flow.is_llm_node(node):
return "stream"
tool_function = self._tools_manager.get_tool(node.name)
return getattr(tool_function, STREAMING_OPTION_PARAMETER_ATTR, None)
def ensure_flow_is_serializable(self):
"""Ensure that the flow is serializable.
Some of the nodes may return a generator of strings to create streaming outputs.
This is useful when the flow is deployed as a web service.
However, in the interactive mode, the executor assumes that the node result is JSON serializable.
This method adds a wrapper to each node in the flow
to consume the streaming outputs and merge them into a string for executor usage.
:return: None
"""
for node in self._flow.nodes:
self._tools_manager.wrap_tool(node.name, wrapper=_ensure_node_result_is_serializable)
def _inject_stream_options(should_stream: Callable[[], bool], streaming_option_parameter="stream"):
"""Inject the stream options to the decorated function.
AzureOpenAI.completion and AzureOpenAI.chat tools support both stream and non-stream mode.
The stream mode is controlled by the "stream" parameter.
"""
def stream_option_decorator(f):
# We only wrap the function if it has a "stream" parameter
signature = inspect.signature(f)
if streaming_option_parameter not in signature.parameters:
return f
@functools.wraps(f)
def wrapper(*args, **kwargs):
kwargs = kwargs or {}
kwargs.update({streaming_option_parameter: should_stream()})
return f(*args, **kwargs)
return wrapper
return stream_option_decorator
def enable_streaming_for_llm_tool(f):
"""Enable the stream mode for LLM tools that support it.
:param f: The function to wrap.
:type f: function
:return: The wrapped function.
:rtype: function
AzureOpenAI.completion and AzureOpenAI.chat tools support both stream and non-stream mode.
The stream mode is turned off by default. Use this wrapper to turn it on.
"""
# We only wrap the function if it has a "stream" parameter
signature = inspect.signature(f)
if "stream" not in signature.parameters:
return f
@functools.wraps(f)
def wrapper(*args, **kwargs):
kwargs = kwargs or {}
kwargs.update(stream=True)
return f(*args, **kwargs)
return wrapper
def _ensure_node_result_is_serializable(f):
"""Ensure the node result is serializable.
Some of the nodes may return a generator of strings to create streaming outputs.
This is useful when the flow is deployed as a web service.
However, in the interactive mode, the executor assumes that the node result is JSON serializable.
This wrapper ensures the node result is serializable
by consuming the data from the generator and merging them into a string.
"""
@functools.wraps(f)
def wrapper(*args, **kwargs):
result = f(*args, **kwargs)
if isinstance(result, GeneratorType):
result = "".join(str(trunk) for trunk in result)
return result
return wrapper
def execute_flow(
flow_file: Path,
working_dir: Path,
output_dir: Path,
connections: dict,
inputs: Mapping[str, Any],
*,
run_id: str = None,
run_aggregation: bool = True,
enable_stream_output: bool = False,
allow_generator_output: bool = False, # TODO: remove this
**kwargs,
) -> LineResult:
"""Execute the flow, including aggregation nodes.
:param flow_file: The path to the flow file.
:type flow_file: Path
:param working_dir: The working directory of the flow.
:type working_dir: Path
:param output_dir: Relative path relative to working_dir.
:type output_dir: Path
:param connections: A dictionary containing connection information.
:type connections: dict
:param inputs: A dictionary containing the input values for the flow.
:type inputs: Mapping[str, Any]
:param enable_stream_output: Whether to allow stream (generator) output for flow output. Default is False.
:type enable_stream_output: Optional[bool]
:param run_id: Run id will be set in operation context and used for session.
:type run_id: Optional[str]
:param kwargs: Other keyword arguments to create flow executor.
:type kwargs: Any
:return: The line result of executing the flow.
:rtype: ~promptflow.executor._result.LineResult
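Example (a minimal sketch; the paths, connections, and input names are illustrative):

.. code-block:: python

    from pathlib import Path

    line_result = execute_flow(
        flow_file=Path("flow.dag.yaml"),
        working_dir=Path("."),
        output_dir=Path(".output"),
        connections={},
        inputs={"question": "What is ChatGPT?"},
    )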
"""
flow_executor = FlowExecutor.create(flow_file, connections, working_dir, raise_ex=False, **kwargs)
flow_executor.enable_streaming_for_llm_flow(lambda: enable_stream_output)
with _change_working_dir(working_dir):
# execute nodes in the flow except the aggregation nodes
# TODO: remove index=0 after UX no longer requires a run id similar to batch runs
# (run_id_index, e.g. xxx_0) for displaying the interface
line_result = flow_executor.exec_line(
inputs, index=0, allow_generator_output=allow_generator_output, run_id=run_id
)
# persist the output to the output directory
line_result.output = persist_multimedia_data(line_result.output, base_dir=working_dir, sub_dir=output_dir)
if run_aggregation and line_result.aggregation_inputs:
# convert inputs of aggregation to list type
flow_inputs = {k: [v] for k, v in inputs.items()}
aggregation_inputs = {k: [v] for k, v in line_result.aggregation_inputs.items()}
aggregation_results = flow_executor.exec_aggregation(
flow_inputs, aggregation_inputs=aggregation_inputs, run_id=run_id
)
line_result.node_run_infos = {**line_result.node_run_infos, **aggregation_results.node_run_infos}
line_result.run_info.metrics = aggregation_results.metrics
if isinstance(line_result.output, dict):
# remove line_number from output
line_result.output.pop(LINE_NUMBER_KEY, None)
return line_result
| promptflow/src/promptflow/promptflow/executor/flow_executor.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/executor/flow_executor.py",
"repo_id": "promptflow",
"token_count": 22729
} | 24 |
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
exclude: '(^docs/)|flows|scripts|src/promptflow/promptflow/azure/_restclient/|src/promptflow/tests/test_configs|src/promptflow-tools'
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v3.2.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-yaml
- id: check-json
- id: check-merge-conflict
- repo: https://github.com/psf/black
rev: 22.3.0 # Replace by any tag/version: https://github.com/psf/black/tags
hooks:
- id: black
language_version: python3 # Should be a command that runs python3.6+
args:
- "--line-length=120"
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v2.3.0
hooks:
- id: flake8
# Temporarily disable this since it gets stuck when updating env
- repo: https://github.com/streetsidesoftware/cspell-cli
rev: v7.3.0
hooks:
- id: cspell
args: ['--config', '.cspell.json', "--no-must-find-files"]
- repo: https://github.com/hadialqattan/pycln
rev: v2.1.2 # Possible releases: https://github.com/hadialqattan/pycln/tags
hooks:
- id: pycln
name: "Clean unused python imports"
args: [--config=setup.cfg]
- repo: https://github.com/pycqa/isort
rev: 5.12.0
hooks:
- id: isort
# stages: [commit]
name: isort-python
# Use black profile for isort to avoid conflicts
# see https://github.com/PyCQA/isort/issues/1518
args: ["--profile", "black", --line-length=120]
| promptflow/.pre-commit-config.yaml/0 | {
"file_path": "promptflow/.pre-commit-config.yaml",
"repo_id": "promptflow",
"token_count": 812
} | 0 |
# Cloud
Prompt flow streamlines the process of developing AI applications based on LLM, easing prompt engineering, prototyping, evaluating, and fine-tuning for high-quality products.
Transitioning to production, however, typically requires a comprehensive **LLMOps** (large language model operations) process. This can often be a complex task, demanding high availability and security, which are particularly vital for large-scale team collaboration and lifecycle management when deploying to production.
To assist in this journey, we've introduced **Azure AI**, a **cloud-based platform** tailored for executing LLMOps, focusing on boosting productivity for enterprises.
* Private data access and controls
* Collaborative development
* Automating iterative experimentation and CI/CD
* Deployment and optimization
* Safe and Responsible AI
![img](../media/cloud/azureml/llmops_cloud_value.png)
## Transitioning from local to cloud (Azure AI)
In prompt flow, you can develop your flow locally and then seamlessly transition to Azure AI. Here are a few scenarios where this might be beneficial:
| Scenario | Benefit | How to|
| --- | --- |--- |
| Collaborative development | Azure AI provides a cloud-based platform for flow development and management, facilitating sharing and collaboration across multiple teams, organizations, and tenants.| [Submit a run using pfazure](./azureai/quick-start/index.md), based on the flow file in your code base.|
| Processing large amounts of data in parallel pipelines | Transitioning to Azure AI allows you to use your flow as a parallel component in a pipeline job, enabling you to process large amounts of data and integrate with existing pipelines. | Learn how to [Use flow in Azure ML pipeline job](./azureai/use-flow-in-azure-ml-pipeline.md).|
| Large-scale Deployment | Azure AI allows for seamless deployment and optimization when your flow is ready for production and requires high availability and security. | Use `pf flow build` to deploy your flow to [Azure App Service](./azureai/deploy-to-azure-appservice.md).|
| Data Security and Responsible AI Practices | If your flow handling sensitive data or requiring ethical AI practices, Azure AI offers robust security, responsible AI services, and features for data storage, identity, and access control. | Follow the steps mentioned in the above scenarios.|
For more resources on Azure AI, visit the cloud documentation site: [Build AI solutions with prompt flow](https://learn.microsoft.com/en-us/azure/machine-learning/prompt-flow/get-started-prompt-flow?view=azureml-api-2).
```{toctree}
:caption: AzureAI
:maxdepth: 2
azureai/quick-start/index
azureai/manage-flows
azureai/consume-connections-from-azure-ai
azureai/deploy-to-azure-appservice
azureai/use-flow-in-azure-ml-pipeline.md
azureai/faq
azureai/runtime-change-log.md
```
| promptflow/docs/cloud/index.md/0 | {
"file_path": "promptflow/docs/cloud/index.md",
"repo_id": "promptflow",
"token_count": 722
} | 1 |
# Develop chat flow
:::{admonition} Experimental feature
This is an experimental feature, and may change at any time. Learn [more](../faq.md#stable-vs-experimental).
:::
From this document, you can learn how to develop a chat flow by writing a flow YAML file from scratch. You can
find additional information about the flow YAML schema in [Flow YAML Schema](../../reference/flow-yaml-schema-reference.md).
## Flow input data
The most important elements that differentiate a chat flow from a standard flow are **chat input** and **chat history**. A chat flow can have multiple inputs, but **chat history** and **chat input** are required inputs in a chat flow.
- **Chat Input**: Chat input refers to the messages or queries submitted by users to the chatbot. Effectively handling chat input is crucial for a successful conversation, as it involves understanding user intentions, extracting relevant information, and triggering appropriate responses.
- **Chat History**: Chat history is the record of all interactions between the user and the chatbot, including both user inputs and AI-generated outputs. Maintaining chat history is essential for keeping track of the conversation context and ensuring the AI can generate contextually relevant responses. Chat history is a special type of chat flow input that stores chat messages in a structured format.
An example of chat history:
```python
[
{"inputs": {"question": "What types of container software there are?"}, "outputs": {"answer": "There are several types of container software available, including: Docker, Kubernetes"}},
{"inputs": {"question": "What's the different between them?"}, "outputs": {"answer": "The main difference between the various container software systems is their functionality and purpose. Here are some key differences between them..."}},
]
```
You can set **is_chat_input**/**is_chat_history** to **true** to add chat_input/chat_history to the chat flow.
```yaml
inputs:
chat_history:
type: list
is_chat_history: true
default: []
question:
type: string
is_chat_input: true
default: What is ChatGPT?
```
For more information see [develop the flow using different tools](./develop-standard-flow.md#flow-input-data).
## Develop the flow using different tools
In one flow, you can consume different kinds of tools. We now support built-in tools like
[LLM](../../reference/tools-reference/llm-tool.md), [Python](../../reference/tools-reference/python-tool.md) and
[Prompt](../../reference/tools-reference/prompt-tool.md) and
third-party tools like [Serp API](../../reference/tools-reference/serp-api-tool.md),
[Vector Search](../../reference/tools-reference/vector_db_lookup_tool.md), etc.
For more information see [develop the flow using different tools](./develop-standard-flow.md#develop-the-flow-using-different-tools).
## Chain your flow - link nodes together
Before linking nodes together, you need to define and expose an interface.
For more information see [chain your flow](./develop-standard-flow.md#chain-your-flow---link-nodes-together).
## Set flow output
**Chat output** is a required output in a chat flow. It refers to the AI-generated messages that are sent to the user in response to their inputs. Generating contextually appropriate and engaging chat outputs is vital for a positive user experience.
You can set **is_chat_output** to **true** to add chat_output to the chat flow.
```yaml
outputs:
answer:
type: string
reference: ${chat.output}
is_chat_output: true
```
| promptflow/docs/how-to-guides/develop-a-flow/develop-chat-flow.md/0 | {
"file_path": "promptflow/docs/how-to-guides/develop-a-flow/develop-chat-flow.md",
"repo_id": "promptflow",
"token_count": 954
} | 2 |
# Frequently asked questions (FAQ)
## General ##
### Stable vs experimental
Prompt flow provides both stable and experimental features in the same SDK.
|Feature status | Description |
|----------------|----------------|
Stable features | **Production ready** <br/><br/> These features are recommended for most use cases and production environments. They are updated less frequently than experimental features.|
Experimental features | **Developmental** <br/><br/> These features are newly developed capabilities & updates that may not be ready or fully tested for production usage. While the features are typically functional, they can include some breaking changes. Experimental features are used to iron out SDK-breaking bugs, and will only receive updates for the duration of the testing period. Experimental features are also referred to as features that are in **preview**. <br/> As the name indicates, the experimental (preview) features are for experimenting and are **not considered bug-free or stable**. For this reason, we only recommend experimental features to advanced users who wish to try out early versions of capabilities and updates, and intend to participate in the reporting of bugs and glitches.
### OpenAI 1.x support
Please use the following commands to upgrade promptflow for openai 1.x support:
```
pip install promptflow>=1.1.0
pip install promptflow-tools>=1.0.0
```
Note that the commands above will upgrade your openai package to a version later than 1.0.0,
which may introduce breaking changes to custom tool code.
See the [OpenAI migration guide](https://github.com/openai/openai-python/discussions/742) for more details.
## Troubleshooting ##
### Connection creation failed with StoreConnectionEncryptionKeyError
```
Connection creation failed with StoreConnectionEncryptionKeyError: System keyring backend service not found in your operating system. See https://pypi.org/project/keyring/ to install requirement for different operating system, or 'pip install keyrings.alt' to use the third-party backend.
```
This error is raised because keyring cannot find an available backend to store keys.
For example, [macOS Keychain](https://en.wikipedia.org/wiki/Keychain_%28software%29) and [Windows Credential Locker](https://learn.microsoft.com/en-us/windows/uwp/security/credential-locker)
are valid keyring backends.
To resolve this issue, install a third-party keyring backend or write your own keyring backend, for example:
`pip install keyrings.alt`
For more details about third-party keyring backends, please refer to 'Third-Party Backends' in [keyring](https://pypi.org/project/keyring/).
### `pf visualize` shows error: "tcgetpgrp failed: Not a tty"
If you are using WSL, this is a known issue for `webbrowser` under WSL; see [this issue](https://github.com/python/cpython/issues/89752) for more information. Please try to upgrade your WSL to 22.04 or later, this issue should be resolved.
If you are still facing this issue with WSL 22.04 or later, or you are not even using WSL, please open an issue with us.
### Installed tool not appearing in VSCode Extension tool list
After installing a tool package via `pip install [tool-package-name]`, the new tool may not immediately appear in the tool list within the VSCode Extension, as shown below:
![VSCode Extension tool list](../media/how-to-guides/vscode-tool-list.png)
This is often due to an outdated cache. To refresh the tool list and make newly installed tools visible:
1. Open the VSCode Extension window.
2. Bring up the command palette by pressing "Ctrl+Shift+P".
3. Type and select the "Developer: Reload Webviews" command.
4. Wait a moment for the tool list to refresh.
Reloading clears the previous cache and populates the tool list with any newly installed tools, so the missing tools become visible.
### Set logging level
Promptflow uses the `logging` module to log messages. You can set the logging level via the environment variable `PF_LOGGING_LEVEL`; valid values include `CRITICAL`, `ERROR`, `WARNING`, `INFO` and `DEBUG`, defaulting to `INFO`.
Below are the serving logs after setting `PF_LOGGING_LEVEL` to `DEBUG`:
![img](../media/how-to-guides/pf_logging_level.png)
Compare them with the serving logs at the `WARNING` level:
![img](../media/how-to-guides/pf_logging_level_warning.png)
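If you prefer setting the level from Python rather than in the shell, a minimal sketch is shown below. It assumes `PF_LOGGING_LEVEL` is read when promptflow initializes its loggers, so the variable must be set before any promptflow import:
``` python
import os

# Must run before promptflow configures its loggers.
os.environ["PF_LOGGING_LEVEL"] = "DEBUG"

from promptflow import PFClient  # imported only after the variable is set

pf = PFClient()
```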
### Set environment variables
Currently, promptflow supports the following environment variables:
**PF_WORKER_COUNT**
Effective for batch runs only: the number of parallel workers used during batch run execution.
The default value is 4 (it was 16 when promptflow<1.4.0).
Please take the following points into consideration when changing it:
1. The concurrency should not exceed the total data rows count. Otherwise, the execution may slow down due to additional time spent on process startup and shutdown.
2. High parallelism may cause the underlying API calls to reach the rate limit of your LLM endpoint, in which case you can decrease `PF_WORKER_COUNT` or increase the rate limit. Please refer to [this doc](https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/quota) on quota management. You can then use the following expression to choose the concurrency:
```
PF_WORKER_COUNT <= TPM * duration_seconds / token_count / 60
```
- TPM: tokens per minute, the capacity rate limit of your LLM endpoint
- duration_seconds: duration of a single flow run, in seconds
- token_count: token count consumed by a single flow run
For example, if your endpoint TPM (tokens per minute) is 50K, and a single flow run consumes 10K tokens and runs for 30 seconds, do not set `PF_WORKER_COUNT` larger than 2. This is a rough estimation. Please also consider collaboration (teammates using the same endpoint at the same time) and the tokens consumed by deployed inference endpoints, the playground, and other cases that might send requests to your LLM endpoint.
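As a sanity check, the expression can be evaluated with the example numbers above (a sketch; substitute your own measured values):
``` python
# Example values from the paragraph above -- replace them with your own measurements.
tpm = 50_000            # endpoint capacity in tokens per minute
duration_seconds = 30   # duration of a single flow run
token_count = 10_000    # tokens consumed by a single flow run

max_workers = tpm * duration_seconds / token_count / 60
print(max_workers)  # 2.5 -> keep PF_WORKER_COUNT at 2 or below
```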
**PF_BATCH_METHOD**
Valid for batch run only. Optional values: 'spawn', 'fork'.
**spawn**
1. The child processes will not inherit resources of the parent process, therefore, each process needs to reinitialize the resources required for the flow, which may use more system memory.
2. Starting a process is slow because it will take some time to initialize the necessary resources.
**fork**
1. Uses the copy-on-write mechanism: the child processes inherit all the resources of the parent process, thereby using less system memory.
2. The process starts faster as it doesn't need to reinitialize resources.
Note: Windows only supports spawn, Linux and macOS support both spawn and fork.
#### How to configure environment variables
1. Configure environment variables in ```flow.dag.yaml```. Example:
```
inputs: []
outputs: []
nodes: []
environment_variables:
PF_WORKER_COUNT: 2
PF_BATCH_METHOD: "spawn"
MY_CUSTOM_SETTING: my_custom_value
```
2. Specify environment variables when submitting runs.
::::{tab-set}
:::{tab-item} CLI
:sync: CLI
Use this parameter: ```--environment-variable``` to specify environment variables.
Example: ```--environment-variable PF_WORKER_COUNT="2" PF_BATCH_METHOD="spawn"```.
:::
:::{tab-item} SDK
:sync: SDK
Specify environment variables when creating run. Example:
``` python
pf = PFClient(
credential=credential,
subscription_id="<SUBSCRIPTION_ID>",
resource_group_name="<RESOURCE_GROUP>",
workspace_name="<AML_WORKSPACE_NAME>",
)
flow = "web-classification"
data = "web-classification/data.jsonl"
runtime = "example-runtime-ci"
environment_variables = {"PF_WORKER_COUNT": "2", "PF_BATCH_METHOD": "spawn"}
# create run
base_run = pf.run(
flow=flow,
data=data,
runtime=runtime,
environment_variables=environment_variables,
)
```
:::
:::{tab-item} VS Code Extension
:sync: VS Code Extension
VSCode Extension supports specifying environment variables only when submitting batch runs.
Specify environment variables in ```batch_run_create.yaml```. Example:
``` yaml
name: flow_name
display_name: display_name
flow: flow_folder
data: data_file
column_mapping:
customer_info: <Please select a data input>
history: <Please select a data input>
environment_variables:
PF_WORKER_COUNT: "2"
PF_BATCH_METHOD: "spawn"
```
:::
::::
#### Priority
The environment variables specified when submitting runs always take precedence over the environment variables in the `flow.dag.yaml` file.
| promptflow/docs/how-to-guides/faq.md/0 | {
"file_path": "promptflow/docs/how-to-guides/faq.md",
"repo_id": "promptflow",
"token_count": 2390
} | 3 |
# Reference
**Current stable version:**
- [promptflow](https://pypi.org/project/promptflow):
[![PyPI version](https://badge.fury.io/py/promptflow.svg)](https://badge.fury.io/py/promptflow)
[![PyPI - Downloads](https://img.shields.io/pypi/dm/promptflow)](https://pypi.org/project/promptflow/)
- [promptflow-tools](https://pypi.org/project/promptflow-tools/):
[![PyPI version](https://badge.fury.io/py/promptflow-tools.svg)](https://badge.fury.io/py/promptflow-tools)
[![PyPI - Downloads](https://img.shields.io/pypi/dm/promptflow-tools)](https://pypi.org/project/promptflow-tools/)
```{toctree}
:caption: Command Line Interface
:maxdepth: 1
pf-command-reference.md
pfazure-command-reference.md
```
```{toctree}
:caption: Python Library Reference
:maxdepth: 4
python-library-reference/promptflow
```
```{toctree}
:caption: Tool Reference
:maxdepth: 1
tools-reference/llm-tool
tools-reference/prompt-tool
tools-reference/python-tool
tools-reference/serp-api-tool
tools-reference/faiss_index_lookup_tool
tools-reference/vector_db_lookup_tool
tools-reference/embedding_tool
tools-reference/open_model_llm_tool
tools-reference/openai-gpt-4v-tool
tools-reference/contentsafety_text_tool
tools-reference/aoai-gpt4-turbo-vision
```
```{toctree}
:caption: YAML Schema
:maxdepth: 1
flow-yaml-schema-reference.md
run-yaml-schema-reference.md
```
| promptflow/docs/reference/index.md/0 | {
"file_path": "promptflow/docs/reference/index.md",
"repo_id": "promptflow",
"token_count": 535
} | 4 |
import functools
import json
import os
import re
import requests
import sys
import time
import tempfile
from abc import abstractmethod
from datetime import datetime, timedelta
from enum import Enum
from typing import Any, Dict, List, Tuple, Optional, Union
from promptflow._core.tool import ToolProvider, tool
from promptflow._sdk._constants import ConnectionType
from promptflow.connections import CustomConnection
from promptflow.contracts.types import PromptTemplate
from promptflow.tools.common import render_jinja_template, validate_role
from promptflow.tools.exception import (
OpenModelLLMOnlineEndpointError,
OpenModelLLMUserError,
OpenModelLLMKeyValidationError,
ChatAPIInvalidRole
)
DEPLOYMENT_DEFAULT = "default"
CONNECTION_CACHE_FILE = "pf_connection_names"
VALID_LLAMA_ROLES = {"system", "user", "assistant"}
AUTH_REQUIRED_CONNECTION_TYPES = {"serverlessendpoint", "onlineendpoint", "connection"}
REQUIRED_CONFIG_KEYS = ["endpoint_url", "model_family"]
REQUIRED_SECRET_KEYS = ["endpoint_api_key"]
ENDPOINT_REQUIRED_ENV_VARS = ["AZUREML_ARM_SUBSCRIPTION", "AZUREML_ARM_RESOURCEGROUP", "AZUREML_ARM_WORKSPACE_NAME"]
def handle_online_endpoint_error(max_retries: int = 5,
initial_delay: float = 2,
exponential_base: float = 3):
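    # Decorator factory: retry the wrapped call with exponential backoff when the online endpoint raises OpenModelLLMOnlineEndpointError.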
def deco_retry(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
delay = initial_delay
for i in range(max_retries):
try:
return func(*args, **kwargs)
except OpenModelLLMOnlineEndpointError as e:
if i == max_retries - 1:
error_message = f"Exception hit calling Online Endpoint: {type(e).__name__}: {str(e)}"
print(error_message, file=sys.stderr)
raise OpenModelLLMOnlineEndpointError(message=error_message)
delay *= exponential_base
time.sleep(delay)
return wrapper
return deco_retry
class ConnectionCache:
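    """Time-boxed local cache of listed connection names, scoped to a single workspace."""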
def __init__(self,
use_until: datetime,
subscription_id: str,
resource_group: str,
workspace_name: str,
connection_names: List[str]):
self.use_until = use_until
self.subscription_id = subscription_id
self.resource_group = resource_group
self.workspace_name = workspace_name
self.connection_names = connection_names
@classmethod
    def from_filename(cls, file):
        cache = json.load(file)
        return cls(cache['use_until'],
                   cache['subscription_id'],
                   cache['resource_group'],
                   cache['workspace_name'],
                   cache['connection_names'])
def can_use(self,
subscription_id: str,
resource_group: str,
workspace_name: str):
use_until_time = datetime.fromisoformat(self.use_until)
return (use_until_time > datetime.now()
and self.subscription_id == subscription_id
and self.resource_group == resource_group
and self.workspace_name == workspace_name)
class Endpoint:
def __init__(self,
endpoint_name: str,
endpoint_url: str,
endpoint_api_key: str):
self.deployments: List[Deployment] = []
self.default_deployment: Deployment = None
self.endpoint_url = endpoint_url
self.endpoint_api_key = endpoint_api_key
self.endpoint_name = endpoint_name
class Deployment:
def __init__(self,
deployment_name: str,
model_family: str):
self.model_family = model_family
self.deployment_name = deployment_name
class ServerlessEndpointsContainer:
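    """Discovers serverless endpoints and retrieves their keys via the ARM REST API."""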
API_VERSION = "2023-08-01-preview"
def _get_headers(self, token: str) -> Dict[str, str]:
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": "application/json",
}
return headers
def get_serverless_arm_url(self, subscription_id, resource_group, workspace_name, suffix=None):
suffix = "" if suffix is None else f"/{suffix}"
return f"https://management.azure.com/subscriptions/{subscription_id}" \
+ f"/resourceGroups/{resource_group}/providers/Microsoft.MachineLearningServices" \
+ f"/workspaces/{workspace_name}/serverlessEndpoints{suffix}?api-version={self.API_VERSION}"
def _list(self, token: str, subscription_id: str, resource_group: str, workspace_name: str):
headers = self._get_headers(token)
url = self.get_serverless_arm_url(subscription_id, resource_group, workspace_name)
try:
response = requests.get(url, headers=headers, timeout=50)
return json.loads(response.content)['value']
except Exception as e:
print(f"Error encountered when listing serverless endpoints. Exception: {e}", file=sys.stderr)
return []
def _validate_model_family(self, serverless_endpoint):
try:
if serverless_endpoint.get('properties', {}).get('provisioningState') != "Succeeded":
return None
if (try_get_from_dict(serverless_endpoint,
['properties', 'offer', 'publisher']) == 'Meta'
and "llama" in try_get_from_dict(serverless_endpoint,
['properties', 'offer', 'offerName'])):
return ModelFamily.LLAMA
if (try_get_from_dict(serverless_endpoint,
['properties', 'marketplaceInfo', 'publisherId']) == 'metagenai'
and "llama" in try_get_from_dict(serverless_endpoint,
['properties', 'marketplaceInfo', 'offerId'])):
return ModelFamily.LLAMA
except Exception as ex:
print(f"Ignoring endpoint {serverless_endpoint['id']} due to error: {ex}", file=sys.stderr)
return None
def list_serverless_endpoints(self,
token,
subscription_id,
resource_group,
workspace_name,
return_endpoint_url: bool = False):
serverlessEndpoints = self._list(token, subscription_id, resource_group, workspace_name)
result = []
for e in serverlessEndpoints:
if (self._validate_model_family(e)):
result.append({
"value": f"serverlessEndpoint/{e['name']}",
"display_value": f"[Serverless] {e['name']}",
# "hyperlink": self.get_endpoint_url(e.endpoint_name)
"description": f"Serverless Endpoint: {e['name']}",
})
if return_endpoint_url:
result[-1]['url'] = try_get_from_dict(e, ['properties', 'inferenceEndpoint', 'uri'])
return result
def _list_endpoint_key(self,
token: str,
subscription_id: str,
resource_group: str,
workspace_name: str,
serverless_endpoint_name: str):
headers = self._get_headers(token)
url = self.get_serverless_arm_url(subscription_id,
resource_group,
workspace_name,
f"{serverless_endpoint_name}/listKeys")
try:
response = requests.post(url, headers=headers, timeout=50)
return json.loads(response.content)
except Exception as e:
print(f"Unable to get key from selected serverless endpoint. Exception: {e}", file=sys.stderr)
def get_serverless_endpoint(self,
token: str,
subscription_id: str,
resource_group: str,
workspace_name: str,
serverless_endpoint_name: str):
headers = self._get_headers(token)
url = self.get_serverless_arm_url(subscription_id, resource_group, workspace_name, serverless_endpoint_name)
try:
response = requests.get(url, headers=headers, timeout=50)
return json.loads(response.content)
except Exception as e:
print(f"Unable to get selected serverless endpoint. Exception: {e}", file=sys.stderr)
def get_serverless_endpoint_key(self,
token: str,
subscription_id: str,
resource_group: str,
workspace_name: str,
serverless_endpoint_name: str) -> Tuple[str, str, str]:
endpoint = self.get_serverless_endpoint(token,
subscription_id,
resource_group,
workspace_name,
serverless_endpoint_name)
endpoint_url = try_get_from_dict(endpoint, ['properties', 'inferenceEndpoint', 'uri'])
model_family = self._validate_model_family(endpoint)
endpoint_api_key = self._list_endpoint_key(token,
subscription_id,
resource_group,
workspace_name,
serverless_endpoint_name)['primaryKey']
return (endpoint_url,
endpoint_api_key,
model_family)
class CustomConnectionsContainer:
def get_azure_custom_connection_names(self,
credential,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
return_endpoint_url: bool = False
) -> List[Dict[str, Union[str, int, float, list, Dict]]]:
result = []
try:
from promptflow.azure import PFClient as AzurePFClient
azure_pf_client = AzurePFClient(
credential=credential,
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name)
except Exception:
message = "Skipping Azure PFClient. To connect, please ensure the following environment variables are set: "
message += ",".join(ENDPOINT_REQUIRED_ENV_VARS)
print(message, file=sys.stderr)
return result
connections = azure_pf_client._connections.list()
for c in connections:
if c.type == ConnectionType.CUSTOM and "model_family" in c.configs:
try:
validate_model_family(c.configs["model_family"])
result.append({
"value": f"connection/{c.name}",
"display_value": f"[Connection] {c.name}",
# "hyperlink": "",
"description": f"Custom Connection: {c.name}",
})
if return_endpoint_url:
result[-1]['url'] = c.configs['endpoint_url']
except Exception:
# silently ignore unsupported model family
continue
return result
def get_local_custom_connection_names(self,
return_endpoint_url: bool = False
) -> List[Dict[str, Union[str, int, float, list, Dict]]]:
result = []
try:
from promptflow import PFClient as LocalPFClient
except Exception as e:
print(f"Skipping Local PFClient. Exception: {e}", file=sys.stderr)
return result
pf = LocalPFClient()
connections = pf.connections.list()
for c in connections:
if c.type == ConnectionType.CUSTOM and "model_family" in c.configs:
try:
validate_model_family(c.configs["model_family"])
result.append({
"value": f"localConnection/{c.name}",
"display_value": f"[Local Connection] {c.name}",
# "hyperlink": "",
"description": f"Local Custom Connection: {c.name}",
})
if return_endpoint_url:
result[-1]['url'] = c.configs['endpoint_url']
except Exception:
# silently ignore unsupported model family
continue
return result
def get_endpoint_from_local_custom_connection(self, connection_name) -> Tuple[str, str, str]:
from promptflow import PFClient as LocalPFClient
pf = LocalPFClient()
connection = pf.connections.get(connection_name, with_secrets=True)
return self.get_endpoint_from_custom_connection(connection)
def get_endpoint_from_azure_custom_connection(self,
credential,
subscription_id,
resource_group_name,
workspace_name,
connection_name) -> Tuple[str, str, str]:
from promptflow.azure import PFClient as AzurePFClient
azure_pf_client = AzurePFClient(
credential=credential,
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name)
connection = azure_pf_client._arm_connections.get(connection_name)
return self.get_endpoint_from_custom_connection(connection)
def get_endpoint_from_custom_connection(self, connection: CustomConnection) -> Tuple[str, str, str]:
conn_dict = dict(connection)
for key in REQUIRED_CONFIG_KEYS:
if key not in conn_dict:
accepted_keys = ",".join([key for key in REQUIRED_CONFIG_KEYS])
raise OpenModelLLMKeyValidationError(
message=f"""Required key `{key}` not found in given custom connection.
Required keys are: {accepted_keys}."""
)
for key in REQUIRED_SECRET_KEYS:
if key not in conn_dict:
accepted_keys = ",".join([key for key in REQUIRED_SECRET_KEYS])
raise OpenModelLLMKeyValidationError(
message=f"""Required secret key `{key}` not found in given custom connection.
Required keys are: {accepted_keys}."""
)
model_family = validate_model_family(connection.configs['model_family'])
return (connection.configs['endpoint_url'],
connection.secrets['endpoint_api_key'],
model_family)
def list_custom_connection_names(self,
credential,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
return_endpoint_url: bool = False
) -> List[Dict[str, Union[str, int, float, list, Dict]]]:
azure_custom_connections = self.get_azure_custom_connection_names(credential,
subscription_id,
resource_group_name,
workspace_name,
return_endpoint_url)
local_custom_connections = self.get_local_custom_connection_names(return_endpoint_url)
return azure_custom_connections + local_custom_connections
class EndpointsContainer:
def get_ml_client(self,
credential,
subscription_id: str,
resource_group_name: str,
workspace_name: str):
try:
from azure.ai.ml import MLClient
return MLClient(
credential=credential,
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name)
except Exception as e:
message = "Unable to connect to AzureML. Please ensure the following environment variables are set: "
message += ",".join(ENDPOINT_REQUIRED_ENV_VARS)
message += "\nException: " + str(e)
raise OpenModelLLMOnlineEndpointError(message=message)
def get_endpoints_and_deployments(self,
credential,
subscription_id: str,
resource_group_name: str,
workspace_name: str) -> List[Endpoint]:
ml_client = self.get_ml_client(credential, subscription_id, resource_group_name, workspace_name)
list_of_endpoints: List[Endpoint] = []
for ep in ml_client.online_endpoints.list():
endpoint = Endpoint(
endpoint_name=ep.name,
endpoint_url=ep.scoring_uri,
endpoint_api_key=ml_client.online_endpoints.get_keys(ep.name).primary_key)
            # ep.traffic maps deployment name -> traffic percentage; order by traffic level, highest first
            ordered_deployment_names = [name for name, _ in sorted(ep.traffic.items(), key=lambda item: item[1], reverse=True)]
deployments = ml_client.online_deployments.list(ep.name)
for deployment_name in ordered_deployment_names:
for d in deployments:
if d.name == deployment_name:
model_family = get_model_type(d.model)
if model_family is None:
continue
deployment = Deployment(deployment_name=d.name, model_family=model_family)
endpoint.deployments.append(deployment)
                        # Deployments are ordered by traffic level; the first one seen becomes the default
if endpoint.default_deployment is None:
endpoint.default_deployment = deployment
if len(endpoint.deployments) > 0:
list_of_endpoints.append(endpoint)
self.__endpoints_and_deployments = list_of_endpoints
return self.__endpoints_and_deployments
def get_endpoint_url(self, endpoint_name, subscription_id, resource_group_name, workspace_name):
return f"https://ml.azure.com/endpoints/realtime/{endpoint_name}" \
+ f"/detail?wsid=/subscriptions/{subscription_id}" \
+ f"/resourceGroups/{resource_group_name}" \
+ f"/providers/Microsoft.MachineLearningServices/workspaces/{workspace_name}"
def list_endpoint_names(self,
credential,
subscription_id,
resource_group_name,
workspace_name,
return_endpoint_url: bool = False
) -> List[Dict[str, Union[str, int, float, list, Dict]]]:
'''Function for listing endpoints in the UX'''
endpoints_and_deployments = self.get_endpoints_and_deployments(
credential,
subscription_id,
resource_group_name,
workspace_name)
result = []
for e in endpoints_and_deployments:
result.append({
"value": f"onlineEndpoint/{e.endpoint_name}",
"display_value": f"[Online] {e.endpoint_name}",
"hyperlink": self.get_endpoint_url(e.endpoint_name,
subscription_id,
resource_group_name,
workspace_name),
"description": f"Online Endpoint: {e.endpoint_name}",
})
if return_endpoint_url:
result[-1]['url'] = e.endpoint_url
return result
def list_deployment_names(self,
credential,
subscription_id,
resource_group_name,
workspace_name,
endpoint_name: str
) -> List[Dict[str, Union[str, int, float, list, Dict]]]:
'''Function for listing deployments in the UX'''
if endpoint_name is None:
return []
endpoints_and_deployments = self.get_endpoints_and_deployments(
credential,
subscription_id,
resource_group_name,
workspace_name)
for endpoint in endpoints_and_deployments:
if endpoint.endpoint_name == endpoint_name:
result = []
for d in endpoint.deployments:
result.append({
"value": d.deployment_name,
"display_value": d.deployment_name,
# "hyperlink": '',
"description": f"this is {d.deployment_name} item",
})
return result
return []
ENDPOINT_CONTAINER = EndpointsContainer()
CUSTOM_CONNECTION_CONTAINER = CustomConnectionsContainer()
SERVERLESS_ENDPOINT_CONTAINER = ServerlessEndpointsContainer()
def is_serverless_endpoint(endpoint_url: str) -> bool:
return "serverless.ml.azure.com" in endpoint_url or "inference.ai.azure.com" in endpoint_url
def try_get_from_dict(some_dict: Dict, key_list: List):
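    """Follow key_list through nested dicts, returning None if any key is missing along the way."""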
for key in key_list:
if some_dict is None:
return some_dict
elif key in some_dict:
some_dict = some_dict[key]
else:
return None
return some_dict
def parse_endpoint_connection_type(endpoint_connection_name: str) -> Tuple[str, str]:
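    # endpoint_connection_name is formatted as "<connectionType>/<name>", e.g. "onlineEndpoint/my-endpoint".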
endpoint_connection_details = endpoint_connection_name.split("/")
return (endpoint_connection_details[0].lower(), endpoint_connection_details[1])
def list_endpoint_names(subscription_id: str,
resource_group_name: str,
workspace_name: str,
return_endpoint_url: bool = False,
force_refresh: bool = False) -> List[Dict[str, Union[str, int, float, list, Dict]]]:
cache_file_path = None
try:
with tempfile.NamedTemporaryFile(delete=False) as temp_file:
cache_file_path = os.path.join(os.path.dirname(temp_file.name), CONNECTION_CACHE_FILE)
print(f"Attempting to read connection cache. File path: {cache_file_path}", file=sys.stdout)
if force_refresh:
print("....skipping. force_refresh is True", file=sys.stdout)
else:
with open(cache_file_path, 'r') as file:
cache = ConnectionCache.from_filename(file)
if cache.can_use(subscription_id, resource_group_name, workspace_name):
if len(cache.connection_names) > 0:
print("....using Connection Cache File", file=sys.stdout)
return cache.connection_names
else:
print("....skipping. No connections in file", file=sys.stdout)
else:
print("....skipping. File not relevant", file=sys.stdout)
except Exception as e:
print(f"....failed to find\\read connection cache file. Regenerating. Error:{e}", file=sys.stdout)
try:
from azure.identity import DefaultAzureCredential
credential = DefaultAzureCredential(exclude_interactive_browser_credential=False)
token = credential.get_token("https://management.azure.com/.default").token
except Exception as e:
print(f"Skipping list_endpoint_names. Exception: {e}", file=sys.stderr)
msg = "Exception getting token: Please retry"
return [{"value": msg, "display_value": msg, "description": msg}]
serverless_endpoints = SERVERLESS_ENDPOINT_CONTAINER.list_serverless_endpoints(token,
subscription_id,
resource_group_name,
workspace_name,
return_endpoint_url)
online_endpoints = ENDPOINT_CONTAINER.list_endpoint_names(credential,
subscription_id,
resource_group_name,
workspace_name,
return_endpoint_url)
custom_connections = CUSTOM_CONNECTION_CONTAINER.list_custom_connection_names(credential,
subscription_id,
resource_group_name,
workspace_name,
return_endpoint_url)
list_of_endpoints = custom_connections + serverless_endpoints + online_endpoints
cache = ConnectionCache(use_until=(datetime.now() + timedelta(minutes=5)).isoformat(),
subscription_id=subscription_id,
resource_group=resource_group_name,
workspace_name=workspace_name,
connection_names=list_of_endpoints)
if len(list_of_endpoints) == 0:
msg = "No endpoints found. Please add a connection."
return [{"value": msg, "display_value": msg, "description": msg}]
if cache_file_path is not None:
try:
print(f"Attempting to write connection cache. File path: {cache_file_path}", file=sys.stdout)
with open(cache_file_path, 'w') as file:
json.dump(cache, file, default=lambda obj: obj.__dict__)
print("....written", file=sys.stdout)
except Exception as e:
print(f"""....failed to write connection cache file. Will need to reload next time.
Error:{e}""", file=sys.stdout)
return list_of_endpoints
def list_deployment_names(subscription_id: str,
resource_group_name: str,
workspace_name: str,
endpoint: str = None) -> List[Dict[str, Union[str, int, float, list, Dict]]]:
deployment_default_list = [{
"value": DEPLOYMENT_DEFAULT,
"display_value": DEPLOYMENT_DEFAULT,
"description": "This will use the default deployment for the selected online endpoint."
+ "You can also manually enter a deployment name here."
}]
if endpoint is None or endpoint.strip() == "" or "/" not in endpoint:
return deployment_default_list
(endpoint_connection_type, endpoint_connection_name) = parse_endpoint_connection_type(endpoint)
if endpoint_connection_type != "onlineendpoint":
return deployment_default_list
try:
from azure.identity import DefaultAzureCredential
credential = DefaultAzureCredential(exclude_interactive_browser_credential=False)
except Exception as e:
print(f"Skipping list_deployment_names. Exception: {e}", file=sys.stderr)
return deployment_default_list
return deployment_default_list + ENDPOINT_CONTAINER.list_deployment_names(
credential,
subscription_id,
resource_group_name,
workspace_name,
endpoint_connection_name
)
def get_model_type(deployment_model: str) -> str:
m = re.match(r'azureml://registries/[^/]+/models/([^/]+)/versions/', deployment_model)
if m is None:
print(f"Unexpected model format: {deployment_model}. Skipping", file=sys.stdout)
return None
model = m[1].lower()
if model.startswith("llama-2"):
return ModelFamily.LLAMA
elif model.startswith("tiiuae-falcon"):
return ModelFamily.FALCON
elif model.startswith("databricks-dolly-v2"):
return ModelFamily.DOLLY
elif model.startswith("gpt2"):
return ModelFamily.GPT2
else:
# Not found and\or handled. Ignore this endpoint\deployment
print(f"Unexpected model type: {model} derived from deployed model: {deployment_model}")
return None
def validate_model_family(model_family: str):
try:
return ModelFamily[model_family]
except KeyError:
accepted_models = ",".join([model.name for model in ModelFamily])
raise OpenModelLLMKeyValidationError(
message=f"""Given model_family '{model_family}' not recognized.
Supported models are: {accepted_models}."""
)
class ModelFamily(str, Enum):
LLAMA = "LLaMa"
DOLLY = "Dolly"
GPT2 = "GPT-2"
FALCON = "Falcon"
@classmethod
def _missing_(cls, value):
value = value.lower()
for member in cls:
if member.lower() == value:
return member
return None
STANDARD_CONTRACT_MODELS = [ModelFamily.DOLLY, ModelFamily.GPT2, ModelFamily.FALCON]
class API(str, Enum):
CHAT = "chat"
COMPLETION = "completion"
class ContentFormatterBase:
"""Transform request and response of AzureML endpoint to match with
required schema.
"""
content_type: Optional[str] = "application/json"
"""The MIME type of the input data passed to the endpoint"""
accepts: Optional[str] = "application/json"
"""The MIME type of the response data returned from the endpoint"""
@staticmethod
def escape_special_characters(prompt: str) -> str:
"""Escapes any special characters in `prompt`"""
return re.sub(
r'\\([\\\"a-zA-Z])',
r'\\\1',
prompt)
@staticmethod
def parse_chat(chat_str: str) -> List[Dict[str, str]]:
# LLaMa only supports below roles.
separator = r"(?i)\n*(system|user|assistant)\s*:\s*\n"
chunks = re.split(separator, chat_str)
# remove any empty chunks
chunks = [c.strip() for c in chunks if c.strip()]
chat_list = []
for index in range(0, len(chunks), 2):
role = chunks[index].lower()
# Check if prompt follows chat api message format and has valid role.
try:
validate_role(role, VALID_LLAMA_ROLES)
except ChatAPIInvalidRole as e:
raise OpenModelLLMUserError(message=e.message)
if len(chunks) <= index + 1:
message = "Unexpected chat format. Please ensure the query matches the chat format of the model used."
raise OpenModelLLMUserError(message=message)
chat_list.append({
"role": role,
"content": chunks[index+1]
})
return chat_list
@abstractmethod
def format_request_payload(self, prompt: str, model_kwargs: Dict) -> str:
"""Formats the request body according to the input schema of
the model. Returns bytes or seekable file like object in the
format specified in the content_type request header.
"""
@abstractmethod
def format_response_payload(self, output: bytes) -> str:
"""Formats the response body according to the output
schema of the model. Returns the data type that is
received from the response.
"""
class MIRCompleteFormatter(ContentFormatterBase):
"""Content handler for LLMs from the HuggingFace catalog."""
def format_request_payload(self, prompt: str, model_kwargs: Dict) -> str:
input_str = json.dumps(
{
"input_data": {"input_string": [ContentFormatterBase.escape_special_characters(prompt)]},
"parameters": model_kwargs,
}
)
return input_str
    def format_response_payload(self, output: bytes) -> str:
        """These models only support generation - expect a single output style"""
        response_json = json.loads(output)
        if len(response_json) > 0 and "0" in response_json[0]:
            return response_json[0]["0"]
        elif "output" in response_json:
            return response_json["output"]
        error_message = f"Unexpected response format. Response: {response_json}"
        print(error_message, file=sys.stderr)
        raise OpenModelLLMOnlineEndpointError(message=error_message)
class LlamaContentFormatter(ContentFormatterBase):
"""Content formatter for LLaMa"""
def __init__(self, api: API, chat_history: Optional[str] = ""):
super().__init__()
self.api = api
self.chat_history = chat_history
def format_request_payload(self, prompt: str, model_kwargs: Dict) -> str:
"""Formats the request according the the chosen api"""
if "do_sample" not in model_kwargs:
model_kwargs["do_sample"] = True
if self.api == API.CHAT:
prompt_value = ContentFormatterBase.parse_chat(self.chat_history)
else:
prompt_value = [ContentFormatterBase.escape_special_characters(prompt)]
return json.dumps(
{
"input_data":
{
"input_string": prompt_value,
"parameters": model_kwargs
}
}
)
def format_response_payload(self, output: bytes) -> str:
"""Formats response"""
response_json = json.loads(output)
if self.api == API.CHAT and "output" in response_json:
return response_json["output"]
elif self.api == API.COMPLETION and len(response_json) > 0 and "0" in response_json[0]:
return response_json[0]["0"]
else:
error_message = f"Unexpected response format. Response: {response_json}"
print(error_message, file=sys.stderr)
raise OpenModelLLMOnlineEndpointError(message=error_message)
class ServerlessLlamaContentFormatter(ContentFormatterBase):
"""Content formatter for LLaMa"""
def __init__(self, api: API, chat_history: Optional[str] = ""):
super().__init__()
self.api = api
self.chat_history = chat_history
self.model_id = "llama-2-7b-hf"
def format_request_payload(self, prompt: str, model_kwargs: Dict) -> str:
"""Formats the request according the the chosen api"""
# Modify max_tokens key for serverless
model_kwargs["max_tokens"] = model_kwargs["max_new_tokens"]
if self.api == API.CHAT:
messages = ContentFormatterBase.parse_chat(self.chat_history)
base_body = {
"model": self.model_id,
"messages": messages,
"n": 1,
}
base_body.update(model_kwargs)
else:
prompt_value = ContentFormatterBase.escape_special_characters(prompt)
base_body = {
"prompt": prompt_value,
"n": 1,
}
base_body.update(model_kwargs)
return json.dumps(base_body)
def format_response_payload(self, output: bytes) -> str:
"""Formats response"""
response_json = json.loads(output)
if self.api == API.CHAT and "choices" in response_json:
return response_json["choices"][0]["message"]["content"]
elif self.api == API.COMPLETION and "choices" in response_json:
return response_json["choices"][0]["text"]
else:
error_message = f"Unexpected response format. Response: {response_json}"
print(error_message, file=sys.stderr)
raise OpenModelLLMOnlineEndpointError(message=error_message)
class ContentFormatterFactory:
"""Factory class for supported models"""
    @staticmethod
    def get_content_formatter(
model_family: ModelFamily, api: API, chat_history: Optional[List[Dict]] = [], endpoint_url: Optional[str] = ""
) -> ContentFormatterBase:
if model_family == ModelFamily.LLAMA:
if is_serverless_endpoint(endpoint_url):
return ServerlessLlamaContentFormatter(chat_history=chat_history, api=api)
else:
return LlamaContentFormatter(chat_history=chat_history, api=api)
elif model_family in STANDARD_CONTRACT_MODELS:
return MIRCompleteFormatter()
class AzureMLOnlineEndpoint:
"""Azure ML Online Endpoint models."""
endpoint_url: str = ""
"""URL of pre-existing Endpoint. Should be passed to constructor or specified as
env var `AZUREML_ENDPOINT_URL`."""
endpoint_api_key: str = ""
"""Authentication Key for Endpoint. Should be passed to constructor or specified as
env var `AZUREML_ENDPOINT_API_KEY`."""
content_formatter: Any = None
"""The content formatter that provides an input and output
transform function to handle formats between the LLM and
the endpoint"""
model_kwargs: Optional[Dict] = {}
"""Key word arguments to pass to the model."""
def __init__(
self,
endpoint_url: str,
endpoint_api_key: str,
content_formatter: ContentFormatterBase,
model_family: ModelFamily,
deployment_name: Optional[str] = None,
model_kwargs: Optional[Dict] = {},
):
self.endpoint_url = endpoint_url
self.endpoint_api_key = endpoint_api_key
self.deployment_name = deployment_name
self.content_formatter = content_formatter
self.model_kwargs = model_kwargs
self.model_family = model_family
def _call_endpoint(self, request_body: str) -> str:
"""call."""
headers = {
"Content-Type": "application/json",
"Authorization": ("Bearer " + self.endpoint_api_key),
"x-ms-user-agent": "PromptFlow/OpenModelLLM/" + self.model_family
}
# If this is not set it'll use the default deployment on the endpoint.
if self.deployment_name is not None:
headers["azureml-model-deployment"] = self.deployment_name
result = requests.post(self.endpoint_url, data=request_body, headers=headers)
if result.status_code != 200:
error_message = f"""Request failure while calling Online Endpoint Status:{result.status_code}
Error:{result.text}"""
print(error_message, file=sys.stderr)
raise OpenModelLLMOnlineEndpointError(message=error_message)
return result.text
def __call__(
self,
prompt: str
) -> str:
"""Call out to an AzureML Managed Online endpoint.
Args:
prompt: The prompt to pass into the model.
Returns:
The string generated by the model.
Example:
.. code-block:: python
response = azureml_model("Tell me a joke.")
"""
request_body = self.content_formatter.format_request_payload(prompt, self.model_kwargs)
endpoint_response = self._call_endpoint(request_body)
response = self.content_formatter.format_response_payload(endpoint_response)
return response
class OpenModelLLM(ToolProvider):
def __init__(self):
super().__init__()
def get_deployment_from_endpoint(self,
credential,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
endpoint_name: str,
deployment_name: str = None) -> Tuple[str, str, str]:
endpoints_and_deployments = ENDPOINT_CONTAINER.get_endpoints_and_deployments(
credential,
subscription_id,
resource_group_name,
workspace_name)
for ep in endpoints_and_deployments:
if ep.endpoint_name == endpoint_name:
if deployment_name is None:
return (ep.endpoint_url,
ep.endpoint_api_key,
ep.default_deployment.model_family)
for d in ep.deployments:
if d.deployment_name == deployment_name:
return (ep.endpoint_url,
ep.endpoint_api_key,
d.model_family)
message = f"""Invalid endpoint and deployment values.
Please ensure the endpoint name and deployment names are correct, and the deployment was successful.
Could not find endpoint: {endpoint_name} and deployment: {deployment_name}"""
raise OpenModelLLMUserError(message=message)
def sanitize_endpoint_url(self,
endpoint_url: str,
api_type: API):
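        # Serverless endpoints use OpenAI-style routes; append the expected suffix when it is missing.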
if is_serverless_endpoint(endpoint_url):
if api_type == API.CHAT:
if not endpoint_url.endswith("/v1/chat/completions"):
return endpoint_url + "/v1/chat/completions"
else:
if not endpoint_url.endswith("/v1/completions"):
return endpoint_url + "/v1/completions"
return endpoint_url
def get_endpoint_details(self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
endpoint: str,
api_type: API,
deployment_name: str = None,
**kwargs) -> Tuple[str, str, str]:
if self.endpoint_values_in_kwargs(**kwargs):
endpoint_url = kwargs["endpoint_url"]
endpoint_api_key = kwargs["endpoint_api_key"]
model_family = kwargs["model_family"]
# clean these up, aka don't send them to MIR
del kwargs["endpoint_url"]
del kwargs["endpoint_api_key"]
del kwargs["model_family"]
return (endpoint_url, endpoint_api_key, model_family)
(endpoint_connection_type, endpoint_connection_name) = parse_endpoint_connection_type(endpoint)
print(f"endpoint_connection_type: {endpoint_connection_type} name: {endpoint_connection_name}", file=sys.stdout)
con_type = endpoint_connection_type.lower()
if con_type in AUTH_REQUIRED_CONNECTION_TYPES:
try:
from azure.identity import DefaultAzureCredential
credential = DefaultAzureCredential(exclude_interactive_browser_credential=False)
token = credential.get_token("https://management.azure.com/.default").token
except Exception as e:
message = f"""Error encountered while attempting to Authorize access to {endpoint}.
Exception: {e}"""
print(message, file=sys.stderr)
raise OpenModelLLMUserError(message=message)
if con_type == "serverlessendpoint":
(endpoint_url, endpoint_api_key, model_family) = SERVERLESS_ENDPOINT_CONTAINER.get_serverless_endpoint_key(
token,
subscription_id,
resource_group_name,
workspace_name,
endpoint_connection_name)
elif con_type == "onlineendpoint":
(endpoint_url, endpoint_api_key, model_family) = self.get_deployment_from_endpoint(
credential,
subscription_id,
resource_group_name,
workspace_name,
endpoint_connection_name,
deployment_name)
elif con_type == "connection":
(endpoint_url,
endpoint_api_key,
model_family) = CUSTOM_CONNECTION_CONTAINER.get_endpoint_from_azure_custom_connection(
credential,
subscription_id,
resource_group_name,
workspace_name,
endpoint_connection_name)
elif con_type == "localconnection":
(endpoint_url,
endpoint_api_key,
model_family) = CUSTOM_CONNECTION_CONTAINER.get_endpoint_from_local_custom_connection(
endpoint_connection_name)
else:
raise OpenModelLLMUserError(message=f"Invalid endpoint connection type: {endpoint_connection_type}")
return (self.sanitize_endpoint_url(endpoint_url, api_type), endpoint_api_key, model_family)
def endpoint_values_in_kwargs(self, **kwargs):
        # This is mostly for testing; avoid relying on it, since passing the endpoint key via kwargs raises security/privacy concerns
if 'endpoint_url' not in kwargs and 'endpoint_api_key' not in kwargs and 'model_family' not in kwargs:
return False
if 'endpoint_url' not in kwargs or 'endpoint_api_key' not in kwargs or 'model_family' not in kwargs:
message = """Endpoint connection via kwargs not fully set.
If using kwargs, the following values must be set: endpoint_url, endpoint_api_key, and model_family"""
raise OpenModelLLMKeyValidationError(message=message)
return True
@tool
@handle_online_endpoint_error()
def call(
self,
prompt: PromptTemplate,
api: API,
endpoint_name: str,
deployment_name: Optional[str] = None,
temperature: Optional[float] = 1.0,
max_new_tokens: Optional[int] = 500,
top_p: Optional[float] = 1.0,
model_kwargs: Optional[Dict] = {},
**kwargs
) -> str:
# Sanitize deployment name. Empty deployment name is the same as None.
if deployment_name is not None:
deployment_name = deployment_name.strip()
if not deployment_name or deployment_name == DEPLOYMENT_DEFAULT:
deployment_name = None
print(f"Executing Open Model LLM Tool for endpoint: '{endpoint_name}', deployment: '{deployment_name}'",
file=sys.stdout)
(endpoint_url, endpoint_api_key, model_family) = self.get_endpoint_details(
subscription_id=os.getenv("AZUREML_ARM_SUBSCRIPTION", None),
resource_group_name=os.getenv("AZUREML_ARM_RESOURCEGROUP", None),
workspace_name=os.getenv("AZUREML_ARM_WORKSPACE_NAME", None),
endpoint=endpoint_name,
api_type=api,
deployment_name=deployment_name,
**kwargs)
prompt = render_jinja_template(prompt, trim_blocks=True, keep_trailing_newline=True, **kwargs)
model_kwargs["top_p"] = top_p
model_kwargs["temperature"] = temperature
model_kwargs["max_new_tokens"] = max_new_tokens
content_formatter = ContentFormatterFactory.get_content_formatter(
model_family=model_family,
api=api,
chat_history=prompt,
endpoint_url=endpoint_url
)
llm = AzureMLOnlineEndpoint(
endpoint_url=endpoint_url,
endpoint_api_key=endpoint_api_key,
model_family=model_family,
content_formatter=content_formatter,
deployment_name=deployment_name,
model_kwargs=model_kwargs
)
return llm(prompt)
| promptflow/src/promptflow-tools/promptflow/tools/open_model_llm.py/0 | {
"file_path": "promptflow/src/promptflow-tools/promptflow/tools/open_model_llm.py",
"repo_id": "promptflow",
"token_count": 23948
} | 5 |
include promptflow/azure/resources/*
include promptflow/_sdk/_serving/static/*
include promptflow/_sdk/_service/templates/*
recursive-include promptflow/_cli/data *
recursive-include promptflow/_sdk/data *
| promptflow/src/promptflow/MANIFEST.in/0 | {
"file_path": "promptflow/src/promptflow/MANIFEST.in",
"repo_id": "promptflow",
"token_count": 60
} | 6 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import argparse
import json
from typing import Callable, Dict, List, Optional, Tuple
from promptflow._cli._params import (
add_param_all_results,
add_param_archived_only,
add_param_columns_mapping,
add_param_connections,
add_param_environment_variables,
add_param_include_archived,
add_param_max_results,
add_param_output_format,
add_param_run_name,
add_param_set,
add_param_yes,
add_parser_build,
base_params,
)
from promptflow._cli._utils import (
_output_result_list_with_format,
activate_action,
confirm,
exception_handler,
list_of_dict_to_dict,
list_of_dict_to_nested_dict,
pretty_print_dataframe_as_table,
)
from promptflow._sdk._constants import MAX_SHOW_DETAILS_RESULTS, get_list_view_type
from promptflow._sdk._load_functions import load_run
from promptflow._sdk._pf_client import PFClient
from promptflow._sdk._run_functions import _create_run
from promptflow._sdk._utils import safe_parse_object_list
from promptflow._sdk.entities import Run
from promptflow.exceptions import UserErrorException
def add_run_parser(subparsers):
run_parser = subparsers.add_parser("run", description="A CLI tool to manage runs for prompt flow.", help="pf run")
subparsers = run_parser.add_subparsers()
add_run_create(subparsers)
# add_run_cancel(subparsers)
add_run_update(subparsers)
add_run_stream(subparsers)
add_run_list(subparsers)
add_run_show(subparsers)
add_run_show_details(subparsers)
add_run_show_metrics(subparsers)
add_run_visualize(subparsers)
add_run_archive(subparsers)
add_run_restore(subparsers)
add_run_delete(subparsers)
add_parser_build(subparsers, "run")
run_parser.set_defaults(action="run")
def add_run_create_common(subparsers, add_param_list, epilog: Optional[str] = None):
# pf run create --file batch_run.yaml [--stream]
add_param_file = lambda parser: parser.add_argument( # noqa: E731
"-f",
"--file",
dest="file",
type=str,
help="Local path to the YAML file containing the run definition. "
"Reference https://azuremlschemas.azureedge.net/promptflow/latest/Run.schema.json for the schema.",
)
add_param_stream = lambda parser: parser.add_argument( # noqa: E731
"-s",
"--stream",
action="store_true",
default=False,
help="Indicates whether to stream the run's logs to the console.",
)
add_param_flow = lambda parser: parser.add_argument( # noqa: E731
"--flow",
type=str,
help="Local path to the flow directory."
"If --file is provided, this path should be relative path to the file.",
)
add_param_variant = lambda parser: parser.add_argument( # noqa: E731
"--variant", type=str, help="Node & variant name in format of ${node_name.variant_name}."
)
add_param_run = lambda parser: parser.add_argument( # noqa: E731
"--run",
type=str,
help="Referenced flow run name referenced by current run. "
"For example, you can run an evaluation flow against an existing run.",
)
add_param_name = lambda parser: parser.add_argument("-n", "--name", type=str, help="Name of the run.") # noqa: E731
add_params = [
add_param_file,
add_param_stream,
add_param_flow,
add_param_variant,
add_param_run,
add_param_name,
add_param_columns_mapping,
# add env var overwrite
add_param_environment_variables,
add_param_connections,
add_param_set,
] + base_params
add_params.extend(add_param_list)
create_parser = activate_action(
name="create",
description=None,
epilog=epilog or "pf run create --file <local-path-to-yaml> [--stream]",
add_params=add_params,
subparsers=subparsers,
help_message="Create a run.",
action_param_name="sub_action",
)
return create_parser
def add_run_create(subparsers):
epilog = """
Examples:
# Create a run with YAML file:
pf run create -f <yaml-filename>
# Create a run with YAML file and replace another data in the YAML file:
pf run create -f <yaml-filename> --data <path-to-new-data-file-relative-to-yaml-file>
# Create a run from flow directory and reference a run:
pf run create --flow <path-to-flow-directory> --data <path-to-data-file> --column-mapping groundtruth='${data.answer}' prediction='${run.outputs.category}' --run <run-name> --variant "${summarize_text_content.variant_0}" --stream # noqa: E501
# Create a run from an existing run record folder
pf run create --source <path-to-run-folder>
"""
# data for pf has different help doc than pfazure
def add_param_data(parser):
parser.add_argument(
"--data",
type=str,
help="Local path to the data file." "If --file is provided, this path should be relative path to the file.",
)
def add_param_source(parser):
parser.add_argument("--source", type=str, help="Local path to the existing run record folder.")
add_run_create_common(subparsers, [add_param_data, add_param_source], epilog=epilog)
def add_run_cancel(subparsers):
epilog = """
Example:
# Cancel a run:
pf run cancel --name <name>
"""
add_params = [add_param_run_name] + base_params
activate_action(
name="cancel",
description=None,
epilog=epilog,
add_params=add_params,
subparsers=subparsers,
help_message="Cancel a run.",
action_param_name="sub_action",
)
def add_run_update(subparsers):
epilog = """
Example:
# Update a run metadata:
pf run update --name <name> --set display_name="<display-name>" description="<description>" tags.key="<value>"
"""
add_params = [
add_param_run_name,
add_param_set,
] + base_params
activate_action(
name="update",
description=None,
epilog=epilog,
add_params=add_params,
subparsers=subparsers,
help_message="Update a run metadata, including display name, description and tags.",
action_param_name="sub_action",
)
def add_run_stream(subparsers):
epilog = """
Example:
# Stream run logs:
pf run stream --name <name>
"""
add_params = [add_param_run_name] + base_params
activate_action(
name="stream",
description=None,
epilog=epilog,
add_params=add_params,
subparsers=subparsers,
help_message="Stream run logs to the console.",
action_param_name="sub_action",
)
def add_run_list(subparsers):
epilog = """
Examples:
# List runs status:
pf run list
# List most recent 10 runs status:
pf run list --max-results 10
# List active and archived runs status:
pf run list --include-archived
# List archived runs status only:
pf run list --archived-only
# List all runs status:
pf run list --all-results
# List all runs status as table:
pf run list --output table
"""
add_params = [
add_param_max_results,
add_param_all_results,
add_param_archived_only,
add_param_include_archived,
add_param_output_format,
] + base_params
activate_action(
name="list",
description=None,
epilog=epilog,
add_params=add_params,
subparsers=subparsers,
help_message="List runs.",
action_param_name="sub_action",
)
def add_run_show(subparsers):
epilog = """
Example:
# Show the status of a run:
pf run show --name <name>
"""
add_params = [add_param_run_name] + base_params
activate_action(
name="show",
description=None,
epilog=epilog,
add_params=add_params,
subparsers=subparsers,
help_message="Show details for a run.",
action_param_name="sub_action",
)
def add_run_show_details(subparsers):
epilog = """
Example:
# View input(s) and output(s) of a run:
pf run show-details --name <name>
"""
add_param_max_results = lambda parser: parser.add_argument( # noqa: E731
"-r",
"--max-results",
dest="max_results",
type=int,
default=MAX_SHOW_DETAILS_RESULTS,
help=f"Number of lines to show. Default is {MAX_SHOW_DETAILS_RESULTS}.",
)
add_params = [add_param_max_results, add_param_run_name, add_param_all_results] + base_params
activate_action(
name="show-details",
description=None,
epilog=epilog,
add_params=add_params,
subparsers=subparsers,
help_message="Preview a run's input(s) and output(s).",
action_param_name="sub_action",
)
def add_run_show_metrics(subparsers):
epilog = """
Example:
# View metrics of a run:
pf run show-metrics --name <name>
"""
add_params = [add_param_run_name] + base_params
activate_action(
name="show-metrics",
description=None,
epilog=epilog,
add_params=add_params,
subparsers=subparsers,
help_message="Print run metrics to the console.",
action_param_name="sub_action",
)
def add_run_visualize(subparsers):
epilog = """
Examples:
# Visualize a run:
pf run visualize -n <name>
# Visualize runs:
pf run visualize --names "<name1,name2>"
pf run visualize --names "<name1>, <name2>"
"""
add_param_name = lambda parser: parser.add_argument( # noqa: E731
"-n", "--names", type=str, required=True, help="Name of the runs, comma separated."
)
add_param_html_path = lambda parser: parser.add_argument( # noqa: E731
"--html-path", type=str, default=None, help=argparse.SUPPRESS
)
add_params = [add_param_name, add_param_html_path] + base_params
activate_action(
name="visualize",
description=None,
epilog=epilog,
add_params=add_params,
subparsers=subparsers,
help_message="Visualize a run.",
action_param_name="sub_action",
)
def add_run_delete(subparsers):
epilog = """
Example:
# Caution: pf run delete is irreversible.
# This operation will delete the run permanently from your local disk.
# Both run entity and output data will be deleted.
# Delete a run:
pf run delete -n "<name>"
"""
add_params = [add_param_run_name, add_param_yes] + base_params
activate_action(
name="delete",
description=None,
epilog=epilog,
add_params=add_params,
subparsers=subparsers,
help_message="Delete a run irreversible.",
action_param_name="sub_action",
)
def add_run_archive(subparsers):
epilog = """
Example:
# Archive a run:
pf run archive --name <name>
"""
add_params = [add_param_run_name] + base_params
activate_action(
name="archive",
description=None,
epilog=epilog,
add_params=add_params,
subparsers=subparsers,
help_message="Archive a run.",
action_param_name="sub_action",
)
def add_run_restore(subparsers):
epilog = """
Example:
# Restore an archived run:
pf run restore --name <name>
"""
add_params = [add_param_run_name] + base_params
activate_action(
name="restore",
description=None,
epilog=epilog,
add_params=add_params,
subparsers=subparsers,
help_message="Restore an archived run.",
action_param_name="sub_action",
)
def dispatch_run_commands(args: argparse.Namespace):
if args.sub_action == "create":
create_run(create_func=_create_run, args=args)
elif args.sub_action == "update":
update_run(name=args.name, params=args.params_override)
elif args.sub_action == "stream":
stream_run(name=args.name)
elif args.sub_action == "list":
list_runs(
max_results=args.max_results,
all_results=args.all_results,
archived_only=args.archived_only,
include_archived=args.include_archived,
output=args.output,
)
elif args.sub_action == "show":
show_run(name=args.name)
elif args.sub_action == "show-details":
show_run_details(name=args.name, max_results=args.max_results, all_results=args.all_results)
elif args.sub_action == "show-metrics":
show_run_metrics(name=args.name)
elif args.sub_action == "visualize":
visualize_run(names=args.names, html_path=args.html_path)
elif args.sub_action == "archive":
archive_run(name=args.name)
elif args.sub_action == "restore":
restore_run(name=args.name)
elif args.sub_action == "export":
export_run(args)
elif args.sub_action == "delete":
delete_run(args.name, args.yes)
else:
raise ValueError(f"Unrecognized command: {args.sub_action}")
def _parse_metadata_args(params: List[Dict[str, str]]) -> Tuple[Optional[str], Optional[str], Optional[Dict[str, str]]]:
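    """Split --set style overrides into (display_name, description, tags), rejecting duplicate keys."""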
display_name, description, tags = None, None, {}
for param in params:
for k, v in param.items():
if k == "display_name":
if display_name is not None:
raise ValueError("Duplicate argument: 'display_name'.")
display_name = v
elif k == "description":
if description is not None:
raise ValueError("Duplicate argument: 'description'.")
description = v
elif k.startswith("tags."):
tag_key = k.replace("tags.", "")
if tag_key in tags:
raise ValueError(f"Duplicate argument: 'tags.{tag_key}'.")
tags[tag_key] = v
if len(tags) == 0:
tags = None
return display_name, description, tags
@exception_handler("Update run")
def update_run(name: str, params: List[Dict[str, str]]) -> None:
# params_override can have multiple items when user specifies with
# `--set key1=value1 key2=value`
# so we need to merge them first.
display_name, description, tags = _parse_metadata_args(params)
pf_client = PFClient()
run = pf_client.runs.update(
name=name,
display_name=display_name,
description=description,
tags=tags,
)
print(json.dumps(run._to_dict(), indent=4))
@exception_handler("Stream run")
def stream_run(name: str) -> None:
pf_client = PFClient()
run = pf_client.runs.stream(name=name)
print(json.dumps(run._to_dict(), indent=4))
@exception_handler("List runs")
def list_runs(
max_results: int,
all_results: bool,
archived_only: bool,
include_archived: bool,
output,
):
pf_client = PFClient()
# aligned behaviour with v2 SDK, all_results will overwrite max_results
if all_results:
max_results = None
runs = pf_client.runs.list(
max_results=max_results,
list_view_type=get_list_view_type(archived_only=archived_only, include_archived=include_archived),
)
# hide additional info and debug info in run list for better user experience
parser = lambda run: run._to_dict(exclude_additional_info=True, exclude_debug_info=True) # noqa: E731
json_list = safe_parse_object_list(
obj_list=runs,
parser=parser,
message_generator=lambda x: f"Error parsing run {x.name!r}, skipped.",
)
_output_result_list_with_format(result_list=json_list, output_format=output)
return runs
@exception_handler("Show run")
def show_run(name: str) -> None:
pf_client = PFClient()
run = pf_client.runs.get(name=name)
print(json.dumps(run._to_dict(), indent=4))
@exception_handler("Show run details")
def show_run_details(name: str, max_results: int, all_results: bool) -> None:
pf_client = PFClient()
details = pf_client.runs.get_details(name=name, max_results=max_results, all_results=all_results)
pretty_print_dataframe_as_table(details)
@exception_handler("Show run metrics")
def show_run_metrics(name: str) -> None:
pf_client = PFClient()
metrics = pf_client.runs.get_metrics(name=name)
print(json.dumps(metrics, indent=4))
@exception_handler("Visualize run")
def visualize_run(names: str, html_path: Optional[str] = None) -> None:
run_names = [name.strip() for name in names.split(",")]
pf_client = PFClient()
pf_client.runs.visualize(run_names, html_path=html_path)
@exception_handler("Archive run")
def archive_run(name: str) -> None:
pf_client = PFClient()
run = pf_client.runs.archive(name=name)
print(json.dumps(run._to_dict(), indent=4))
@exception_handler("Restore run")
def restore_run(name: str) -> None:
pf_client = PFClient()
run = pf_client.runs.restore(name=name)
print(json.dumps(run._to_dict(), indent=4))
def _parse_kv_pair(kv_pairs: str) -> Dict[str, str]:
    result = {}
    for kv_pair in kv_pairs.split(","):
        kv_pair = kv_pair.strip()
        if "=" not in kv_pair:
            raise ValueError(f"Invalid key-value pair: {kv_pair}")
        key, value = kv_pair.split("=", 1)
        result[key] = value
    return result
@exception_handler("Create run")
def create_run(create_func: Callable, args):
file = args.file
flow = args.flow
run_source = getattr(args, "source", None) # source is only available for pf args, not pfazure.
data = args.data
column_mapping = args.column_mapping
variant = args.variant
name = args.name
run = args.run
stream = args.stream
environment_variables = args.environment_variables
connections = args.connections
params_override = args.params_override or []
if environment_variables:
environment_variables = list_of_dict_to_dict(environment_variables)
if connections:
connections = list_of_dict_to_nested_dict(connections)
if column_mapping:
column_mapping = list_of_dict_to_dict(column_mapping)
if file:
for param_key, param in {
"name": name,
"flow": flow,
"variant": variant,
"data": data,
"column_mapping": column_mapping,
"run": run,
"environment_variables": environment_variables,
"connections": connections,
}.items():
if not param:
continue
params_override.append({param_key: param})
run = load_run(source=file, params_override=params_override)
elif flow:
run_data = {
"name": name,
"flow": flow,
"data": data,
"column_mapping": column_mapping,
"run": run,
"variant": variant,
"environment_variables": environment_variables,
"connections": connections,
}
# remove empty fields
run_data = {k: v for k, v in run_data.items() if v is not None}
run = Run._load(data=run_data, params_override=params_override)
elif run_source:
display_name, description, tags = _parse_metadata_args(params_override)
processed_params = {
"display_name": display_name,
"description": description,
"tags": tags,
}
run = Run._load_from_source(source=run_source, params_override=processed_params)
else:
raise UserErrorException("To create a run, one of [file, flow, source] must be specified.")
run = create_func(run=run, stream=stream)
if stream:
print("\n") # change new line to show run info
print(json.dumps(run._to_dict(), indent=4))
@exception_handler("Delete run")
def delete_run(name: str, skip_confirm: bool = False) -> None:
    if confirm("Are you sure you want to permanently delete this run?", skip_confirm):
pf_client = PFClient()
pf_client.runs.delete(name=name)
else:
print("The delete operation was canceled.")
def export_run(args):
raise NotImplementedError()
| promptflow/src/promptflow/promptflow/_cli/_pf/_run.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_cli/_pf/_run.py",
"repo_id": "promptflow",
"token_count": 8431
} | 7 |
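# A minimal usage sketch for the CLI helpers above. It assumes the module is
# importable as promptflow._cli._pf._run (derived from the file path); the
# key/value strings are hypothetical `--set`-style arguments.
from promptflow._cli._pf._run import _parse_kv_pair

pairs = _parse_kv_pair("env=prod, owner=alice")
assert pairs == {"env": "prod", "owner": "alice"}
# _parse_metadata_args (defined above) consumes a list of single-item dicts,
# e.g. [{"display_name": "my-run"}, {"tags.team": "nlp"}], and returns a
# (display_name, description, tags) tuple, with tags=None when no tag is set.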
$schema: https://azuremlschemas.azureedge.net/promptflow/latest/AzureOpenAIConnection.schema.json
name: {{ connection }}
type: azure_open_ai
api_key: "<user-input>"
api_base: "<user-input>"
api_type: "azure"
| promptflow/src/promptflow/promptflow/_cli/data/chat_flow/template/azure_openai.yaml.jinja2/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_cli/data/chat_flow/template/azure_openai.yaml.jinja2",
"repo_id": "promptflow",
"token_count": 83
} | 8 |
{% if icon %}
from pathlib import Path
{% endif %}
from promptflow import tool
from promptflow.connections import CustomConnection
@tool(
name="{{ tool_name }}",
description="This is {{ tool_name }} tool",
{% if icon %}
icon={{ icon }},
{% endif %}
{% for key, value in extra_info.items() %}
{{ key }}={{ value }},
{% endfor %}
)
def {{ tool_name }}(connection: CustomConnection, input_text: str) -> str:
    # Replace with your tool code.
    # Usually the connection contains configs to connect to an API.
    # CustomConnection is a dict-like object; access its fields like connection.api_key or connection.api_base.
    # Not all tools need a connection. You can remove it if you don't need it.
return "Hello " + input_text
| promptflow/src/promptflow/promptflow/_cli/data/package_tool/tool.py.jinja2/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_cli/data/package_tool/tool.py.jinja2",
"repo_id": "promptflow",
"token_count": 241
} | 9 |
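# A minimal sketch of rendering the package-tool template above with Jinja2;
# the template file name and the render values are hypothetical.
from pathlib import Path

from jinja2 import Template

template = Template(Path("tool.py.jinja2").read_text())
rendered = template.render(tool_name="my_tool", icon=None, extra_info={})
print(rendered)  # a ready-to-edit @tool-decorated function definition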
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import inspect
from typing import Callable
class MetricLoggerManager:
_instance = None
def __init__(self):
self._metric_loggers = []
@staticmethod
def get_instance() -> "MetricLoggerManager":
if MetricLoggerManager._instance is None:
MetricLoggerManager._instance = MetricLoggerManager()
return MetricLoggerManager._instance
def log_metric(self, key, value, variant_id=None):
for logger in self._metric_loggers:
if len(inspect.signature(logger).parameters) == 2:
logger(key, value) # If the logger only accepts two parameters, we don't pass variant_id
else:
logger(key, value, variant_id)
def add_metric_logger(self, logger_func: Callable):
existing_logger = next((logger for logger in self._metric_loggers if logger is logger_func), None)
if existing_logger:
return
if not callable(logger_func):
return
sign = inspect.signature(logger_func)
# We accept two kinds of metric loggers:
# def log_metric(k, v)
# def log_metric(k, v, variant_id)
if len(sign.parameters) not in [2, 3]:
return
self._metric_loggers.append(logger_func)
def remove_metric_logger(self, logger_func: Callable):
self._metric_loggers.remove(logger_func)
def log_metric(key, value, variant_id=None):
"""Log a metric for current promptflow run.
:param key: Metric name.
:type key: str
:param value: Metric value.
:type value: float
:param variant_id: Variant id for the metric.
:type variant_id: str
"""
MetricLoggerManager.get_instance().log_metric(key, value, variant_id)
def add_metric_logger(logger_func: Callable):
MetricLoggerManager.get_instance().add_metric_logger(logger_func)
def remove_metric_logger(logger_func: Callable):
MetricLoggerManager.get_instance().remove_metric_logger(logger_func)
| promptflow/src/promptflow/promptflow/_core/metric_logger.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_core/metric_logger.py",
"repo_id": "promptflow",
"token_count": 838
} | 10 |
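# A minimal usage sketch for the metric logger APIs above; the console logger
# below is hypothetical, and any 2- or 3-parameter callable is accepted.
from promptflow._core.metric_logger import add_metric_logger, log_metric, remove_metric_logger

def console_logger(key, value, variant_id=None):
    print(f"metric {key}={value} (variant={variant_id})")

add_metric_logger(console_logger)
log_metric("accuracy", 0.91, variant_id="variant_0")  # -> metric accuracy=0.91 (variant=variant_0)
remove_metric_logger(console_logger)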
# flake8: noqa
"""Put some imports here for mlflow promptflow flavor usage.
DO NOT change the module names in the "__all__" list. If an interface has changed in the source code, wrap it here
and keep the original function/module names the same as before; otherwise mldesigner will be broken by this change.
"""
from promptflow._sdk._constants import DAG_FILE_NAME
from promptflow._sdk._serving.flow_invoker import FlowInvoker
from promptflow._sdk._submitter import remove_additional_includes
from promptflow._sdk._utils import _merge_local_code_and_additional_includes
from promptflow._sdk.entities._flow import Flow
__all__ = [
"Flow",
"FlowInvoker",
"remove_additional_includes",
"_merge_local_code_and_additional_includes",
"DAG_FILE_NAME",
]
| promptflow/src/promptflow/promptflow/_sdk/_mlflow.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_sdk/_mlflow.py",
"repo_id": "promptflow",
"token_count": 236
} | 11 |
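# A minimal sketch of the re-exported FlowInvoker above, based on how the
# serving app (later in this file set) constructs and calls one; the flow
# path and input payload are hypothetical, other parameters stay at defaults.
from promptflow._sdk._mlflow import FlowInvoker

invoker = FlowInvoker("./my_flow")
result = invoker.invoke({"question": "hello"})  # hypothetical flow input
print(result.output)  # flow outputs; result.run_info carries the status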
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
# this file is different from other files in this folder
# functions (APIs) defined in this file follow OTLP 1.1.0
# https://opentelemetry.io/docs/specs/otlp/#otlphttp-request
# to provide OTLP/HTTP endpoint as OTEL collector
import json
from flask import request
from google.protobuf.json_format import MessageToJson
from opentelemetry.proto.collector.trace.v1.trace_service_pb2 import ExportTraceServiceRequest
from promptflow._constants import SpanResourceFieldName
from promptflow._sdk._utils import parse_kv_from_pb_attribute
from promptflow._sdk.entities._trace import Span
def trace_collector():
    content_type = request.headers.get("Content-Type", "")  # default to "" so the membership checks below are safe
# binary protobuf encoding
if "application/x-protobuf" in content_type:
traces_request = ExportTraceServiceRequest()
traces_request.ParseFromString(request.data)
for resource_span in traces_request.resource_spans:
resource_attributes = dict()
for attribute in resource_span.resource.attributes:
attribute_dict = json.loads(MessageToJson(attribute))
attr_key, attr_value = parse_kv_from_pb_attribute(attribute_dict)
resource_attributes[attr_key] = attr_value
resource = {
SpanResourceFieldName.ATTRIBUTES: resource_attributes,
SpanResourceFieldName.SCHEMA_URL: resource_span.schema_url,
}
for scope_span in resource_span.scope_spans:
for span in scope_span.spans:
# TODO: persist with batch
Span._from_protobuf_object(span, resource=resource)._persist()
return "Traces received", 200
# JSON protobuf encoding
elif "application/json" in content_type:
raise NotImplementedError
| promptflow/src/promptflow/promptflow/_sdk/_service/apis/collector.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_sdk/_service/apis/collector.py",
"repo_id": "promptflow",
"token_count": 729
} | 12 |
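# A minimal client-side sketch for the OTLP/HTTP endpoint above: serialize an
# ExportTraceServiceRequest and POST it with the protobuf content type. The
# collector URL is hypothetical, and a real exporter would normally be
# configured through the OpenTelemetry SDK instead of hand-rolled requests.
import requests
from opentelemetry.proto.collector.trace.v1.trace_service_pb2 import ExportTraceServiceRequest

payload = ExportTraceServiceRequest()  # normally populated with resource_spans
response = requests.post(
    "http://localhost:23333/v1/traces",  # hypothetical service address/route
    data=payload.SerializeToString(),
    headers={"Content-Type": "application/x-protobuf"},
)
print(response.status_code, response.text)  # expected: 200 Traces received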
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import json
import logging
import mimetypes
import os
from pathlib import Path
from typing import Dict
from flask import Flask, g, jsonify, request
from promptflow._sdk._load_functions import load_flow
from promptflow._sdk._serving.extension.extension_factory import ExtensionFactory
from promptflow._sdk._serving.flow_invoker import FlowInvoker
from promptflow._sdk._serving.response_creator import ResponseCreator
from promptflow._sdk._serving.utils import (
enable_monitoring,
get_output_fields_to_remove,
get_sample_json,
handle_error_to_response,
load_request_data,
streaming_response_required,
)
from promptflow._sdk._utils import setup_user_agent_to_operation_context
from promptflow._utils.exception_utils import ErrorResponse
from promptflow._utils.logger_utils import LoggerFactory
from promptflow._version import VERSION
from promptflow.contracts.run_info import Status
from promptflow.exceptions import SystemErrorException
from promptflow.storage._run_storage import DummyRunStorage
from .swagger import generate_swagger
logger = LoggerFactory.get_logger("pfserving-app", target_stdout=True)
DEFAULT_STATIC_PATH = Path(__file__).parent / "static"
USER_AGENT = f"promptflow-local-serving/{VERSION}"
class PromptflowServingApp(Flask):
def init(self, **kwargs):
with self.app_context():
# default to local, can be override when creating the app
self.extension = ExtensionFactory.create_extension(logger, **kwargs)
self.flow_invoker: FlowInvoker = None
# parse promptflow project path
self.project_path = self.extension.get_flow_project_path()
logger.info(f"Project path: {self.project_path}")
self.flow_entity = load_flow(self.project_path)
self.flow = self.flow_entity._init_executable()
# enable environment_variables
environment_variables = kwargs.get("environment_variables", {})
os.environ.update(environment_variables)
default_environment_variables = self.flow.get_environment_variables_with_overrides()
self.set_default_environment_variables(default_environment_variables)
self.flow_name = self.extension.get_flow_name()
self.flow.name = self.flow_name
conn_data_override, conn_name_override = self.extension.get_override_connections(self.flow)
self.connections_override = conn_data_override
self.connections_name_override = conn_name_override
self.flow_monitor = self.extension.get_flow_monitor()
self.connection_provider = self.extension.get_connection_provider()
self.credential = self.extension.get_credential()
self.sample = get_sample_json(self.project_path, logger)
self.init_swagger()
# try to initialize the flow invoker
try:
self.init_invoker_if_not_exist()
except Exception as e:
if self.extension.raise_ex_on_invoker_initialization_failure(e):
raise e
# ensure response has the correct content type
mimetypes.add_type("application/javascript", ".js")
mimetypes.add_type("text/css", ".css")
setup_user_agent_to_operation_context(self.extension.get_user_agent())
add_default_routes(self)
# register blueprints
blue_prints = self.extension.get_blueprints()
for blue_print in blue_prints:
self.register_blueprint(blue_print)
def init_invoker_if_not_exist(self):
if self.flow_invoker:
return
logger.info("Promptflow executor starts initializing...")
self.flow_invoker = FlowInvoker(
self.project_path,
connection_provider=self.connection_provider,
streaming=streaming_response_required,
raise_ex=False,
connections=self.connections_override,
connections_name_overrides=self.connections_name_override,
            # for serving, we don't need to persist intermediate results; this avoids a memory leak.
storage=DummyRunStorage(),
credential=self.credential,
)
self.flow = self.flow_invoker.flow
# Set the flow name as folder name
self.flow.name = self.flow_name
self.response_fields_to_remove = get_output_fields_to_remove(self.flow, logger)
logger.info("Promptflow executor initializing succeed!")
def init_swagger(self):
self.response_fields_to_remove = get_output_fields_to_remove(self.flow, logger)
self.swagger = generate_swagger(self.flow, self.sample, self.response_fields_to_remove)
def set_default_environment_variables(self, default_environment_variables: Dict[str, str] = None):
if default_environment_variables is None:
return
for key, value in default_environment_variables.items():
if key not in os.environ:
os.environ[key] = value
def add_default_routes(app: PromptflowServingApp):
@app.errorhandler(Exception)
def handle_error(e):
err_resp, resp_code = handle_error_to_response(e, logger)
app.flow_monitor.handle_error(e, resp_code)
return err_resp, resp_code
@app.route("/score", methods=["POST"])
@enable_monitoring
def score():
"""process a flow request in the runtime."""
raw_data = request.get_data()
logger.debug(f"PromptFlow executor received data: {raw_data}")
app.init_invoker_if_not_exist()
        if not app.flow.inputs:
data = {}
logger.info("Flow has no input, request data will be ignored.")
else:
logger.info("Start loading request data...")
data = load_request_data(app.flow, raw_data, logger)
# set context data
g.data = data
g.flow_id = app.flow.id or app.flow.name
run_id = g.get("req_id", None)
# TODO: refine this once we can directly set the input/output log level to DEBUG in flow_invoker.
disable_data_logging = logger.level >= logging.INFO
flow_result = app.flow_invoker.invoke(data, run_id=run_id, disable_input_output_logging=disable_data_logging)
g.flow_result = flow_result
# check flow result, if failed, return error response
if flow_result.run_info.status != Status.Completed:
if flow_result.run_info.error:
err = ErrorResponse(flow_result.run_info.error)
g.err_code = err.innermost_error_code
return jsonify(err.to_simplified_dict()), err.response_code
else:
# in case of run failed but can't find any error, return 500
exception = SystemErrorException("Flow execution failed without error message.")
return jsonify(ErrorResponse.from_exception(exception).to_simplified_dict()), 500
intermediate_output = flow_result.output or {}
# remove evaluation only fields
result_output = {k: v for k, v in intermediate_output.items() if k not in app.response_fields_to_remove}
response_creator = ResponseCreator(
flow_run_result=result_output,
accept_mimetypes=request.accept_mimetypes,
)
app.flow_monitor.setup_streaming_monitor_if_needed(response_creator, data, intermediate_output)
return response_creator.create_response()
@app.route("/swagger.json", methods=["GET"])
def swagger():
"""Get the swagger object."""
return jsonify(app.swagger)
@app.route("/health", methods=["GET"])
def health():
"""Check if the runtime is alive."""
return {"status": "Healthy", "version": VERSION}
@app.route("/version", methods=["GET"])
def version():
"""Check the runtime's version."""
build_info = os.environ.get("BUILD_INFO", "")
try:
build_info_dict = json.loads(build_info)
version = build_info_dict["build_number"]
except Exception:
version = VERSION
return {"status": "Healthy", "build_info": build_info, "version": version}
def create_app(**kwargs):
app = PromptflowServingApp(__name__)
if __name__ != "__main__":
app.logger.handlers = logger.handlers
app.logger.setLevel(logger.level)
app.init(**kwargs)
return app
if __name__ == "__main__":
create_app().run()
| promptflow/src/promptflow/promptflow/_sdk/_serving/app.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_sdk/_serving/app.py",
"repo_id": "promptflow",
"token_count": 3515
} | 13 |
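# A minimal sketch of exercising the serving app above with Flask's test
# client. How the flow project path is supplied depends on the extension
# (kwargs or environment), so that part is an assumption; the /score payload
# must match the flow's declared inputs.
from promptflow._sdk._serving.app import create_app

app = create_app()  # assumes the default extension can locate a flow project
client = app.test_client()
print(client.get("/health").get_json())  # {"status": "Healthy", "version": ...}
response = client.post("/score", json={"question": "hello"})  # hypothetical input
print(response.status_code, response.get_json())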
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Local Server Test App</title>
<style>
html,
body {
height: 100%;
width: 100%;
box-sizing: border-box;
padding: 0;
margin: 0;
}
#root {
height: 100%;
width: 100%;
display: flex;
}
</style>
<script type="module" crossorigin src="/static/index.js"></script>
</head>
<body>
<div id="root"></div>
<script>
const time = new Date().toISOString();
const now = performance.now();
console.log("[perf " + time + " " + now + "]" + " load script start");
</script>
</body>
</html>
| promptflow/src/promptflow/promptflow/_sdk/_serving/static/index.html/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_sdk/_serving/static/index.html",
"repo_id": "promptflow",
"token_count": 357
} | 14 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
"""
The code in this file has been vendored from the azure-ai-ml repo.
Please do not edit it unless really necessary.
"""
# region Diff-imports
import os
from pathlib import Path, PureWindowsPath
from typing import Any, Iterable, List, Optional, Tuple, Union
from ._pathspec import GitWildMatchPattern, normalize_file
GIT_IGNORE_FILE_NAME = ".gitignore"
AML_IGNORE_FILE_NAME = ".amlignore"
def convert_windows_path_to_unix(path: Union[str, os.PathLike]) -> str:
return PureWindowsPath(path).as_posix()
# endregion
class IgnoreFile(object):
def __init__(self, file_path: Optional[Union[str, Path]] = None):
"""Base class for handling .gitignore and .amlignore files.
:param file_path: Relative path, or absolute path to the ignore file.
"""
path = Path(file_path).resolve() if file_path else None
self._path = path
self._path_spec = None
def exists(self) -> bool:
"""Checks if ignore file exists."""
return self._file_exists()
def _file_exists(self) -> bool:
return self._path and self._path.exists()
@property
def base_path(self) -> Path:
return self._path.parent
def _get_ignore_list(self) -> List[str]:
"""Get ignore list from ignore file contents."""
if not self.exists():
return []
if self._file_exists():
with open(self._path, "r") as fh:
return [line.rstrip() for line in fh if line]
return []
def _create_pathspec(self) -> List[GitWildMatchPattern]:
"""Creates path specification based on ignore list."""
return [GitWildMatchPattern(ignore) for ignore in self._get_ignore_list()]
def _get_rel_path(self, file_path: Union[str, Path]) -> Optional[str]:
"""Get relative path of given file_path."""
file_path = Path(file_path).absolute()
try:
# use os.path.relpath instead of Path.relative_to in case file_path is not a child of self.base_path
return os.path.relpath(file_path, self.base_path)
except ValueError:
# 2 paths are on different drives
return None
def is_file_excluded(self, file_path: Union[str, Path]) -> bool:
"""Checks if given file_path is excluded.
:param file_path: File path to be checked against ignore file specifications
"""
# TODO: current design of ignore file can't distinguish between files and directories of the same name
if self._path_spec is None:
self._path_spec = self._create_pathspec()
if not self._path_spec:
return False
file_path = self._get_rel_path(file_path)
if file_path is None:
return True
norm_file = normalize_file(file_path)
matched = False
for pattern in self._path_spec:
if pattern.include is not None:
if pattern.match_file(norm_file) is not None:
matched = pattern.include
return matched
@property
def path(self) -> Union[Path, str]:
return self._path
class AmlIgnoreFile(IgnoreFile):
def __init__(self, directory_path: Union[Path, str]):
file_path = Path(directory_path).joinpath(AML_IGNORE_FILE_NAME)
super(AmlIgnoreFile, self).__init__(file_path)
class GitIgnoreFile(IgnoreFile):
def __init__(self, directory_path: Union[Path, str]):
file_path = Path(directory_path).joinpath(GIT_IGNORE_FILE_NAME)
super(GitIgnoreFile, self).__init__(file_path)
def get_ignore_file(directory_path: Union[Path, str]) -> Optional[IgnoreFile]:
"""Finds and returns IgnoreFile object based on ignore file found in directory_path.
.amlignore takes precedence over .gitignore and if no file is found, an empty
IgnoreFile object will be returned.
The ignore file must be in the root directory.
:param directory_path: Path to the (root) directory where ignore file is located
"""
aml_ignore = AmlIgnoreFile(directory_path)
git_ignore = GitIgnoreFile(directory_path)
if aml_ignore.exists():
return aml_ignore
if git_ignore.exists():
return git_ignore
return IgnoreFile()
def get_upload_files_from_folder(
path: Union[str, Path], *, prefix: str = "", ignore_file: IgnoreFile = IgnoreFile()
) -> List[Tuple[str, str]]:
"""Enumerate all files in the given directory and compose paths for them to be uploaded to in the remote storage.
:param path: Path to the directory to be uploaded
:type path: str
:param prefix: Prefix for remote storage path
:type prefix: str
:param ignore_file: Ignore file object
:type ignore_file: IgnoreFile
:return: List of tuples of (local path, remote path)
:rtype: list
"""
path = Path(path)
upload_paths = []
for root, _, files in os.walk(path, followlinks=True):
upload_paths += list(
traverse_directory(
root,
files,
prefix=Path(prefix).joinpath(Path(root).relative_to(path)).as_posix(),
ignore_file=ignore_file,
)
)
return upload_paths
def traverse_directory(
root: str,
files: List[str],
*,
prefix: str,
ignore_file: IgnoreFile = IgnoreFile(),
# keep this for backward compatibility
**kwargs: Any,
) -> Iterable[Tuple[str, str]]:
"""Enumerate all files in the given directory and compose paths for them to be uploaded to in the remote storage.
e.g.
[/mnt/c/Users/dipeck/upload_files/my_file1.txt,
/mnt/c/Users/dipeck/upload_files/my_file2.txt] -->
[(/mnt/c/Users/dipeck/upload_files/my_file1.txt, LocalUpload/<guid>/upload_files/my_file1.txt),
(/mnt/c/Users/dipeck/upload_files/my_file2.txt, LocalUpload/<guid>/upload_files/my_file2.txt))]
:param root: Root directory path
:type root: str
:param files: List of all file paths in the directory
:type files: List[str]
:param prefix: Remote upload path for project directory (e.g. LocalUpload/<guid>/project_dir)
:type prefix: str
:param ignore_file: The .amlignore or .gitignore file in the project directory
:type ignore_file: azure.ai.ml._utils._asset_utils.IgnoreFile
:return: Zipped list of tuples representing the local path and remote destination path for each file
:rtype: Iterable[Tuple[str, str]]
"""
    # Normalize Windows paths. Note that the path should be resolved first, as a long path segment is converted
    # to an 8.3 short name on Windows. For example, C:\Users\too-long-user-name\test is converted to
    # C:\Users\too-lo~1\test by default. Refer to https://en.wikipedia.org/wiki/8.3_filename for more details.
root = Path(root).resolve().absolute()
    # filter out files excluded by the ignore file
    # TODO: inner ignore files won't take effect. A merged IgnoreFile needs to be generated during code resolution.
origin_file_paths = [
root.joinpath(filename)
for filename in files
if not ignore_file.is_file_excluded(root.joinpath(filename).as_posix())
]
result = []
for origin_file_path in origin_file_paths:
relative_path = origin_file_path.relative_to(root)
result.append((_resolve_path(origin_file_path).as_posix(), Path(prefix).joinpath(relative_path).as_posix()))
return result
def _resolve_path(path: Path) -> Path:
if not path.is_symlink():
return path
link_path = path.resolve()
if not link_path.is_absolute():
link_path = path.parent.joinpath(link_path).resolve()
return _resolve_path(link_path)
| promptflow/src/promptflow/promptflow/_sdk/_vendor/_asset_utils.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_sdk/_vendor/_asset_utils.py",
"repo_id": "promptflow",
"token_count": 2954
} | 15 |
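# A minimal usage sketch for the vendored ignore-file helpers above; the
# directory layout and prefix are hypothetical. .amlignore takes precedence
# over .gitignore, and a missing file yields an empty (no-op) IgnoreFile.
from promptflow._sdk._vendor._asset_utils import get_ignore_file, get_upload_files_from_folder

ignore_file = get_ignore_file("./my_flow")
print(ignore_file.is_file_excluded("./my_flow/.env"))  # True if matched by a pattern

uploads = get_upload_files_from_folder("./my_flow", prefix="LocalUpload/1234", ignore_file=ignore_file)
for local_path, remote_path in uploads:
    print(local_path, "->", remote_path)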
# -*- mode: python ; coding: utf-8 -*-
from PyInstaller.utils.hooks import collect_data_files
from PyInstaller.utils.hooks import copy_metadata
datas = [('connections', 'connections'), ('flow', 'flow'), ('settings.json', '.'), ('main.py', '.'), ('utils.py', '.'), ('logo.png', '.'), ('{{runtime_interpreter_path}}', './streamlit/runtime')]
datas += collect_data_files('streamlit')
datas += copy_metadata('streamlit')
datas += collect_data_files('keyrings.alt', include_py_files=True)
datas += copy_metadata('keyrings.alt')
datas += collect_data_files('streamlit_quill')
block_cipher = None
a = Analysis(
['app.py', 'main.py', 'utils.py'],
pathex=[],
binaries=[],
datas=datas,
hiddenimports={{hidden_imports}},
hookspath=[],
hooksconfig={},
runtime_hooks=[],
excludes=[],
win_no_prefer_redirects=False,
win_private_assemblies=False,
cipher=block_cipher,
noarchive=False,
)
pyz = PYZ(a.pure, a.zipped_data, cipher=block_cipher)
exe = EXE(
pyz,
a.scripts,
a.binaries,
a.zipfiles,
a.datas,
[],
name='app',
debug=False,
bootloader_ignore_signals=False,
strip=False,
upx=True,
upx_exclude=[],
runtime_tmpdir=None,
console=True,
disable_windowed_traceback=False,
argv_emulation=False,
target_arch=None,
codesign_identity=None,
entitlements_file=None,
) | promptflow/src/promptflow/promptflow/_sdk/data/executable/app.spec.jinja2/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_sdk/data/executable/app.spec.jinja2",
"repo_id": "promptflow",
"token_count": 575
} | 16 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
# pylint: disable=protected-access
import copy
import json
import os.path
import typing
from pathlib import Path
from typing import Dict, List, Optional
import pydash
import strictyaml
from marshmallow import ValidationError
from promptflow._utils.logger_utils import get_cli_sdk_logger
logger = get_cli_sdk_logger()
class _ValidationStatus:
"""Validation status class.
    Validation status is used to indicate the status of a validation result. It can be one of the following values:
Succeeded, Failed.
"""
SUCCEEDED = "Succeeded"
"""Succeeded."""
FAILED = "Failed"
"""Failed."""
class Diagnostic(object):
"""Represents a diagnostic of an asset validation error with the location info."""
def __init__(self, yaml_path: str, message: str, error_code: str, **kwargs) -> None:
"""Init Diagnostic.
:keyword yaml_path: A dash path from root to the target element of the diagnostic.
:paramtype yaml_path: str
:keyword message: Error message of diagnostic.
:paramtype message: str
:keyword error_code: Error code of diagnostic.
:paramtype error_code: str
"""
self.yaml_path = yaml_path
self.message = message
self.error_code = error_code
self.local_path, self.value = None, None
self._key = kwargs.pop("key", "yaml_path")
# Set extra info to attribute
for k, v in kwargs.items():
if not k.startswith("_"):
setattr(self, k, v)
def __repr__(self) -> str:
"""The asset friendly name and error message.
:return: The formatted diagnostic
:rtype: str
"""
return "{}: {}".format(getattr(self, self._key), self.message)
@classmethod
def create_instance(
cls,
yaml_path: str,
message: Optional[str] = None,
error_code: Optional[str] = None,
**kwargs,
):
"""Create a diagnostic instance.
:param yaml_path: A dash path from root to the target element of the diagnostic.
:type yaml_path: str
:param message: Error message of diagnostic.
:type message: str
:param error_code: Error code of diagnostic.
:type error_code: str
:return: The created instance
:rtype: Diagnostic
"""
return cls(
yaml_path=yaml_path,
message=message,
error_code=error_code,
**kwargs,
)
class ValidationResult(object):
"""Represents the result of validation.
    This class is used to organize and parse diagnostics from both the client & server side before exposing them.
    The result is immutable.
"""
def __init__(self) -> None:
self._target_obj = None
self._errors = []
self._warnings = []
self._kwargs = {}
def _set_extra_info(self, key, value):
self._kwargs[key] = value
def _get_extra_info(self, key, default=None):
return self._kwargs.get(key, default)
@property
def error_messages(self) -> Dict:
"""
Return all messages of errors in the validation result.
:return: A dictionary of error messages. The key is the yaml path of the error, and the value is the error
message.
:rtype: dict
"""
messages = {}
for diagnostic in self._errors:
message_key = getattr(diagnostic, diagnostic._key)
if message_key not in messages:
messages[message_key] = diagnostic.message
else:
messages[message_key] += "; " + diagnostic.message
return messages
@property
def passed(self) -> bool:
"""Returns boolean indicating whether any errors were found.
:return: True if the validation passed, False otherwise.
:rtype: bool
"""
return not self._errors
def _to_dict(self) -> typing.Dict[str, typing.Any]:
result = {
"result": _ValidationStatus.SUCCEEDED if self.passed else _ValidationStatus.FAILED,
}
result.update(self._kwargs)
for diagnostic_type, diagnostics in [
("errors", self._errors),
("warnings", self._warnings),
]:
messages = []
for diagnostic in diagnostics:
message = {
"message": diagnostic.message,
"path": diagnostic.yaml_path,
"value": pydash.get(self._target_obj, diagnostic.yaml_path, diagnostic.value),
}
if diagnostic.local_path:
message["location"] = str(diagnostic.local_path)
for attr in dir(diagnostic):
if attr not in message and not attr.startswith("_") and not callable(getattr(diagnostic, attr)):
message[attr] = getattr(diagnostic, attr)
message = {k: v for k, v in message.items() if v is not None}
messages.append(message)
if messages:
result[diagnostic_type] = messages
return result
def __repr__(self) -> str:
"""Get the string representation of the validation result.
:return: The string representation
:rtype: str
"""
return json.dumps(self._to_dict(), indent=2)
class MutableValidationResult(ValidationResult):
"""Used by the client side to construct a validation result.
The result is mutable and should not be exposed to the user.
"""
def __init__(self, target_obj: Optional[typing.Dict[str, typing.Any]] = None):
super().__init__()
self._target_obj = target_obj
def merge_with(
self,
target: ValidationResult,
field_name: Optional[str] = None,
condition_skip: Optional[typing.Callable] = None,
overwrite: bool = False,
):
"""Merge errors & warnings in another validation results into current one.
Will update current validation result.
        If field_name is not None, then yaml_path in the other validation result will be updated accordingly,
        e.g. * => field_name and a.b => field_name.a.b. If None, no update is applied.
:param target: Validation result to merge.
:type target: ValidationResult
:param field_name: The base field name for the target to merge.
:type field_name: str
:param condition_skip: A function to determine whether to skip the merge of a diagnostic in the target.
:type condition_skip: typing.Callable
:param overwrite: Whether to overwrite the current validation result. If False, all diagnostics will be kept;
if True, current diagnostics with the same yaml_path will be dropped.
:type overwrite: bool
:return: The current validation result.
:rtype: MutableValidationResult
"""
for source_diagnostics, target_diagnostics in [
(target._errors, self._errors),
(target._warnings, self._warnings),
]:
if overwrite:
keys_to_remove = set(map(lambda x: x.yaml_path, source_diagnostics))
target_diagnostics[:] = [
diagnostic for diagnostic in target_diagnostics if diagnostic.yaml_path not in keys_to_remove
]
for diagnostic in source_diagnostics:
if condition_skip and condition_skip(diagnostic):
continue
new_diagnostic = copy.deepcopy(diagnostic)
if field_name:
if new_diagnostic.yaml_path == "*":
new_diagnostic.yaml_path = field_name
else:
new_diagnostic.yaml_path = field_name + "." + new_diagnostic.yaml_path
target_diagnostics.append(new_diagnostic)
return self
def try_raise(
self,
raise_error: bool = True,
*,
error_func: typing.Callable[[str, str], Exception] = None,
) -> "MutableValidationResult":
"""Try to raise an error from the validation result.
If the validation is passed or raise_error is False, this method
will return the validation result.
:param raise_error: Whether to raise the error.
:type raise_error: bool
:keyword error_func: A function to create the error. If None, a marshmallow.ValidationError will be created.
The first parameter of the function is the string representation of the validation result,
and the second parameter is the error message without personal data.
:type error_func: typing.Callable[[str, str], Exception]
:return: The current validation result.
:rtype: MutableValidationResult
"""
# pylint: disable=logging-not-lazy
if raise_error is False:
return self
if self._warnings:
logger.warning("Schema validation warnings: %s" % str(self._warnings))
if not self.passed:
if error_func is None:
def error_func(msg, _):
return ValidationError(message=msg)
raise error_func(
self.__repr__(),
f"Schema validation failed: {self.error_messages}",
)
return self
def append_error(
self,
yaml_path: str = "*",
message: Optional[str] = None,
error_code: Optional[str] = None,
**kwargs,
):
"""Append an error to the validation result.
:param yaml_path: The yaml path of the error.
:type yaml_path: str
:param message: The message of the error.
:type message: str
:param error_code: The error code of the error.
:type error_code: str
:return: The current validation result.
:rtype: MutableValidationResult
"""
self._errors.append(
Diagnostic.create_instance(
yaml_path=yaml_path,
message=message,
error_code=error_code,
**kwargs,
)
)
return self
def resolve_location_for_diagnostics(self, source_path: str, resolve_value: bool = False):
"""Resolve location/value for diagnostics based on the source path where the validatable object is loaded.
Location includes local path of the exact file (can be different from the source path) & line number of the
        invalid field. The value of a diagnostic is resolved from the validatable object while transforming it to a
        dict by default; however, when the validatable object is not available (e.g. the validation result is created
        from marshmallow.ValidationError.messages), the value can be resolved from the source path.
:param source_path: The path of the source file.
:type source_path: str
:param resolve_value: Whether to resolve the value of the invalid field from source file.
:type resolve_value: bool
"""
resolver = _YamlLocationResolver(source_path)
for diagnostic in self._errors + self._warnings:
diagnostic.local_path, value = resolver.resolve(diagnostic.yaml_path)
if value is not None and resolve_value:
diagnostic.value = value
def append_warning(
self,
yaml_path: str = "*",
message: Optional[str] = None,
error_code: Optional[str] = None,
**kwargs,
):
"""Append a warning to the validation result.
:param yaml_path: The yaml path of the warning.
:type yaml_path: str
:param message: The message of the warning.
:type message: str
:param error_code: The error code of the warning.
:type error_code: str
:return: The current validation result.
:rtype: MutableValidationResult
"""
self._warnings.append(
Diagnostic.create_instance(
yaml_path=yaml_path,
message=message,
error_code=error_code,
**kwargs,
)
)
return self
class ValidationResultBuilder:
"""A helper class to create a validation result."""
UNKNOWN_MESSAGE = "Unknown field."
def __init__(self):
pass
@classmethod
def success(cls) -> MutableValidationResult:
"""Create a validation result with success status.
:return: A validation result
:rtype: MutableValidationResult
"""
return MutableValidationResult()
@classmethod
def from_single_message(
cls, singular_error_message: Optional[str] = None, yaml_path: str = "*", data: Optional[dict] = None
):
"""Create a validation result with only 1 diagnostic.
:param singular_error_message: diagnostic.message.
:type singular_error_message: Optional[str]
:param yaml_path: diagnostic.yaml_path.
:type yaml_path: str
        :param data: serialized validation target.
:type data: Optional[Dict]
:return: The validation result
:rtype: MutableValidationResult
"""
obj = MutableValidationResult(target_obj=data)
if singular_error_message:
obj.append_error(message=singular_error_message, yaml_path=yaml_path)
return obj
@classmethod
def from_validation_error(
cls, error: ValidationError, *, source_path: Optional[str] = None, error_on_unknown_field=False
) -> MutableValidationResult:
"""Create a validation result from a ValidationError, which will be raised in marshmallow.Schema.load. Please
use this function only for exception in loading file.
:param error: ValidationError raised by marshmallow.Schema.load.
:type error: ValidationError
:keyword error_on_unknown_field: whether to raise error if there are unknown field diagnostics.
:paramtype error_on_unknown_field: bool
:return: The validation result
:rtype: MutableValidationResult
"""
obj = cls.from_validation_messages(
error.messages, data=error.data, error_on_unknown_field=error_on_unknown_field
)
if source_path:
obj.resolve_location_for_diagnostics(source_path, resolve_value=True)
return obj
@classmethod
def from_validation_messages(
cls, errors: typing.Dict, data: typing.Dict, *, error_on_unknown_field: bool = False
) -> MutableValidationResult:
"""Create a validation result from error messages, which will be returned by marshmallow.Schema.validate.
:param errors: error message returned by marshmallow.Schema.validate.
:type errors: dict
:param data: serialized data to validate
:type data: dict
:keyword error_on_unknown_field: whether to raise error if there are unknown field diagnostics.
:paramtype error_on_unknown_field: bool
:return: The validation result
:rtype: MutableValidationResult
"""
instance = MutableValidationResult(target_obj=data)
errors = copy.deepcopy(errors)
cls._from_validation_messages_recursively(errors, [], instance, error_on_unknown_field=error_on_unknown_field)
return instance
@classmethod
def _from_validation_messages_recursively(
cls,
errors: typing.Union[typing.Dict, typing.List, str],
path_stack: typing.List[str],
instance: MutableValidationResult,
error_on_unknown_field: bool,
):
cur_path = ".".join(path_stack) if path_stack else "*"
# single error message
if isinstance(errors, dict) and "_schema" in errors:
instance.append_error(
message=";".join(errors["_schema"]),
yaml_path=cur_path,
)
# errors on attributes
elif isinstance(errors, dict):
for field, msgs in errors.items():
# fields.Dict
if field in ["key", "value"]:
cls._from_validation_messages_recursively(msgs, path_stack, instance, error_on_unknown_field)
else:
                    # TODO: Add hack logic here to deal with error messages in nested TypeSensitiveUnionField in
                    # DataTransfer: it will be a nested dict with a None field as the dictionary key.
                    # Opened an item to track: https://msdata.visualstudio.com/Vienna/_workitems/edit/2244262/
if field is None:
cls._from_validation_messages_recursively(msgs, path_stack, instance, error_on_unknown_field)
else:
path_stack.append(field)
cls._from_validation_messages_recursively(msgs, path_stack, instance, error_on_unknown_field)
path_stack.pop()
# detailed error message
elif isinstance(errors, list) and all(isinstance(msg, str) for msg in errors):
if cls.UNKNOWN_MESSAGE in errors and not error_on_unknown_field:
# Unknown field is not a real error, so we should remove it and append a warning.
errors.remove(cls.UNKNOWN_MESSAGE)
instance.append_warning(message=cls.UNKNOWN_MESSAGE, yaml_path=cur_path)
if errors:
instance.append_error(message=";".join(errors), yaml_path=cur_path)
# union field
elif isinstance(errors, list):
def msg2str(msg):
if isinstance(msg, str):
return msg
if isinstance(msg, dict) and len(msg) == 1 and "_schema" in msg and len(msg["_schema"]) == 1:
return msg["_schema"][0]
return str(msg)
instance.append_error(message="; ".join([msg2str(x) for x in errors]), yaml_path=cur_path)
# unknown error
else:
instance.append_error(message=str(errors), yaml_path=cur_path)
class _YamlLocationResolver:
def __init__(self, source_path):
self._source_path = source_path
def resolve(self, yaml_path, source_path=None):
"""Resolve the location & value of a yaml path starting from source_path.
:param yaml_path: yaml path.
:type yaml_path: str
:param source_path: source path.
:type source_path: str
:return: the location & value of the yaml path based on source_path.
:rtype: Tuple[str, str]
"""
source_path = source_path or self._source_path
if source_path is None or not os.path.isfile(source_path):
return None, None
if yaml_path is None or yaml_path == "*":
return source_path, None
attrs = yaml_path.split(".")
attrs.reverse()
return self._resolve_recursively(attrs, Path(source_path))
def _resolve_recursively(self, attrs: List[str], source_path: Path):
with open(source_path, encoding="utf-8") as f:
try:
loaded_yaml = strictyaml.load(f.read())
except Exception as e: # pylint: disable=broad-except
msg = "Can't load source file %s as a strict yaml:\n%s" % (source_path, str(e))
logger.debug(msg)
return None, None
while attrs:
attr = attrs[-1]
if loaded_yaml.is_mapping() and attr in loaded_yaml:
loaded_yaml = loaded_yaml.get(attr)
attrs.pop()
elif loaded_yaml.is_sequence() and attr.isdigit() and 0 <= int(attr) < len(loaded_yaml):
loaded_yaml = loaded_yaml[int(attr)]
attrs.pop()
else:
try:
# if current object is a path of a valid yaml file, try to resolve location in new source file
next_path = Path(loaded_yaml.value)
if not next_path.is_absolute():
next_path = source_path.parent / next_path
if next_path.is_file():
return self._resolve_recursively(attrs, source_path=next_path)
except OSError:
pass
except TypeError:
pass
# if not, return current section
break
return (
f"{source_path.resolve().absolute()}#line {loaded_yaml.start_line}",
None if attrs else loaded_yaml.value,
)
| promptflow/src/promptflow/promptflow/_sdk/entities/_validation/core.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_sdk/entities/_validation/core.py",
"repo_id": "promptflow",
"token_count": 9020
} | 17 |
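# A minimal sketch of building a validation result with the classes above;
# the yaml paths and messages are hypothetical.
from promptflow._sdk.entities._validation.core import ValidationResultBuilder

result = ValidationResultBuilder.success()
result.append_warning(yaml_path="inputs.temperature", message="Value not set, using default.")
result.append_error(yaml_path="nodes.0.type", message="Unknown node type.")

print(result.passed)          # False: one error was recorded
print(result.error_messages)  # {"nodes.0.type": "Unknown node type."}
result.try_raise(raise_error=False)  # returns the result instead of raising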
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from marshmallow import fields, post_load, pre_load
from promptflow._sdk._constants import ExperimentNodeType
from promptflow._sdk.schemas._base import PatchedSchemaMeta, YamlFileSchema
from promptflow._sdk.schemas._fields import (
LocalPathField,
NestedField,
PrimitiveValueField,
StringTransformedEnum,
UnionField,
)
from promptflow._sdk.schemas._run import RunSchema
class CommandNodeSchema(YamlFileSchema):
# TODO: Not finalized now. Need to revisit.
name = fields.Str(required=True)
display_name = fields.Str()
type = StringTransformedEnum(allowed_values=ExperimentNodeType.COMMAND, required=True)
code = LocalPathField()
command = fields.Str(required=True)
inputs = fields.Dict(keys=fields.Str)
outputs = fields.Dict(keys=fields.Str, values=LocalPathField(allow_none=True))
environment_variables = fields.Dict(keys=fields.Str, values=fields.Str)
# runtime field, only available for cloud run
runtime = fields.Str() # TODO: Revisit the required fields
class FlowNodeSchema(RunSchema):
class Meta:
exclude = ["flow", "column_mapping", "data", "run"]
name = fields.Str(required=True)
type = StringTransformedEnum(allowed_values=ExperimentNodeType.FLOW, required=True)
inputs = fields.Dict(keys=fields.Str)
path = UnionField([LocalPathField(required=True), fields.Str(required=True)])
@pre_load
def warning_unknown_fields(self, data, **kwargs):
# Override to avoid warning here
return data
class ExperimentDataSchema(metaclass=PatchedSchemaMeta):
name = fields.Str(required=True)
path = LocalPathField(required=True)
class ExperimentInputSchema(metaclass=PatchedSchemaMeta):
name = fields.Str(required=True)
type = fields.Str(required=True)
default = PrimitiveValueField()
class ExperimentTemplateSchema(YamlFileSchema):
description = fields.Str()
data = fields.List(NestedField(ExperimentDataSchema)) # Optional
inputs = fields.List(NestedField(ExperimentInputSchema)) # Optional
nodes = fields.List(
UnionField(
[
NestedField(CommandNodeSchema),
NestedField(FlowNodeSchema),
]
),
required=True,
)
@post_load
def resolve_nodes(self, data, **kwargs):
from promptflow._sdk.entities._experiment import CommandNode, FlowNode
nodes = data.get("nodes", [])
resolved_nodes = []
for node in nodes:
if not isinstance(node, dict):
continue
node_type = node.get("type", None)
if node_type == ExperimentNodeType.FLOW:
resolved_nodes.append(FlowNode._load_from_dict(data=node, context=self.context, additional_message=""))
elif node_type == ExperimentNodeType.COMMAND:
resolved_nodes.append(
CommandNode._load_from_dict(data=node, context=self.context, additional_message="")
)
else:
raise ValueError(f"Unknown node type {node_type} for node {node}.")
data["nodes"] = resolved_nodes
return data
@post_load
def resolve_data_and_inputs(self, data, **kwargs):
from promptflow._sdk.entities._experiment import ExperimentData, ExperimentInput
def resolve_resource(key, cls):
items = data.get(key, [])
resolved_result = []
for item in items:
if not isinstance(item, dict):
continue
resolved_result.append(
cls._load_from_dict(
data=item,
context=self.context,
additional_message=f"Failed to load {cls.__name__}",
)
)
return resolved_result
data["data"] = resolve_resource("data", ExperimentData)
data["inputs"] = resolve_resource("inputs", ExperimentInput)
return data
class ExperimentSchema(ExperimentTemplateSchema):
name = fields.Str()
node_runs = fields.Dict(keys=fields.Str(), values=fields.Str()) # TODO: Revisit this
status = fields.Str(dump_only=True)
properties = fields.Dict(keys=fields.Str(), values=fields.Str(allow_none=True))
created_on = fields.Str(dump_only=True)
last_start_time = fields.Str(dump_only=True)
last_end_time = fields.Str(dump_only=True)
| promptflow/src/promptflow/promptflow/_sdk/schemas/_experiment.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_sdk/schemas/_experiment.py",
"repo_id": "promptflow",
"token_count": 1860
} | 18 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import hashlib
import os
from os import PathLike
from pathlib import Path
from typing import Union
from promptflow._sdk._constants import DAG_FILE_NAME, DEFAULT_ENCODING
from promptflow._utils.logger_utils import LoggerFactory
from promptflow._utils.yaml_utils import dump_yaml, load_yaml
logger = LoggerFactory.get_logger(name=__name__)
def get_flow_lineage_id(flow_dir: Union[str, PathLike]):
"""
    Get the lineage id for a flow. The lineage id will be the same for the same flow in the same git repo
    or on the same device.
    If the flow is located in a git repo:
        use the repo name + the relative path to flow_dir as the lineage id
    Otherwise:
        use the device id + the absolute path to flow_dir as the lineage id
:param flow_dir: flow directory
"""
flow_dir = Path(flow_dir).resolve()
if not flow_dir.is_dir():
flow_dir = flow_dir.parent
try:
from git import Repo
repo = Repo(flow_dir, search_parent_directories=True)
lineage_id = f"{os.path.basename(repo.working_dir)}/{flow_dir.relative_to(repo.working_dir).as_posix()}"
logger.debug("Got lineage id %s from git repo.", lineage_id)
except Exception:
        # failed to get the repo, so use the device id + absolute path to flow_dir as the lineage id
import uuid
device_id = uuid.getnode()
lineage_id = f"{device_id}/{flow_dir.absolute().as_posix()}"
logger.debug("Got lineage id %s from local since failed to get git info.", lineage_id)
# hash the value to avoid it gets too long, and it's not user visible.
lineage_id = hashlib.sha256(lineage_id.encode()).hexdigest()
return lineage_id
def resolve_flow_path(flow_path: Path):
"""Resolve given flow path to dag file path."""
if flow_path.is_dir():
flow_path = flow_path / DAG_FILE_NAME
return flow_path
def load_flow_dag(flow_path: Path):
"""Load flow dag from given flow path."""
flow_path = resolve_flow_path(flow_path)
if not flow_path.exists():
raise FileNotFoundError(f"Flow file {flow_path} not found")
with open(flow_path, "r", encoding=DEFAULT_ENCODING) as f:
flow_dag = load_yaml(f)
return flow_path, flow_dag
def dump_flow_dag(flow_dag: dict, flow_path: Path):
"""Dump flow dag to given flow path."""
flow_path = resolve_flow_path(flow_path)
with open(flow_path, "w", encoding=DEFAULT_ENCODING) as f:
dump_yaml(flow_dag, f)
return flow_path
| promptflow/src/promptflow/promptflow/_utils/flow_utils.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_utils/flow_utils.py",
"repo_id": "promptflow",
"token_count": 949
} | 19 |
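# A minimal usage sketch for the flow utilities above; the flow directory is
# hypothetical and must contain a flow.dag.yaml, and the "description" edit
# is only an illustrative modification.
from pathlib import Path

from promptflow._utils.flow_utils import dump_flow_dag, get_flow_lineage_id, load_flow_dag

flow_dir = Path("./my_flow")
flow_path, dag = load_flow_dag(flow_dir)  # resolves to ./my_flow/flow.dag.yaml
print(get_flow_lineage_id(flow_dir))      # stable sha256 hex digest

dag["description"] = "Updated via script."
dump_flow_dag(dag, flow_dir)              # writes the modified DAG back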
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
from ._component import COMMAND_COMPONENT_SPEC_TEMPLATE, DEFAULT_PYTHON_VERSION
from ._flow import FlowJobType, FlowType
__all__ = ["FlowJobType", "FlowType", "DEFAULT_PYTHON_VERSION", "COMMAND_COMPONENT_SPEC_TEMPLATE"]
| promptflow/src/promptflow/promptflow/azure/_constants/__init__.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/azure/_constants/__init__.py",
"repo_id": "promptflow",
"token_count": 138
} | 20 |
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.9.2, generator: @autorest/[email protected])
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._trace_sessions_operations import build_attach_cosmos_account_request, build_get_cosmos_resource_token_request
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class TraceSessionsOperations:
"""TraceSessionsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~flow.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace_async
async def attach_cosmos_account(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
overwrite: Optional[bool] = False,
body: Optional["_models.AttachCosmosRequest"] = None,
**kwargs: Any
) -> Any:
"""attach_cosmos_account.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param overwrite:
:type overwrite: bool
:param body:
:type body: ~flow.models.AttachCosmosRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:return: any, or the result of cls(response)
:rtype: any
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[Any]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
if body is not None:
_json = self._serialize.body(body, 'AttachCosmosRequest')
else:
_json = None
request = build_attach_cosmos_account_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
content_type=content_type,
json=_json,
overwrite=overwrite,
template_url=self.attach_cosmos_account.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('object', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
attach_cosmos_account.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/TraceSessions/attachDb'} # type: ignore
@distributed_trace_async
async def get_cosmos_resource_token(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
container_name: str,
acquire_write: Optional[bool] = False,
**kwargs: Any
) -> str:
"""get_cosmos_resource_token.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param container_name:
:type container_name: str
:param acquire_write:
:type acquire_write: bool
:keyword callable cls: A custom type or function that will be passed the direct response
:return: str, or the result of cls(response)
:rtype: str
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[str]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_cosmos_resource_token_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
container_name=container_name,
acquire_write=acquire_write,
template_url=self.get_cosmos_resource_token.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_cosmos_resource_token.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/TraceSessions/container/{containerName}/resourceToken'} # type: ignore
| promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio/operations/_trace_sessions_operations.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio/operations/_trace_sessions_operations.py",
"repo_id": "promptflow",
"token_count": 2814
} | 21 |
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.9.2, generator: @autorest/[email protected])
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
# fmt: off
def build_get_tool_setting_request(
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/setting')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
headers=header_parameters,
**kwargs
)
def build_get_samples_request(
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/samples')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
headers=header_parameters,
**kwargs
)
def build_get_tool_meta_request(
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
content_type = kwargs.pop('content_type', None) # type: Optional[str]
tool_name = kwargs.pop('tool_name') # type: str
tool_type = kwargs.pop('tool_type') # type: str
endpoint_name = kwargs.pop('endpoint_name', None) # type: Optional[str]
flow_runtime_name = kwargs.pop('flow_runtime_name', None) # type: Optional[str]
flow_id = kwargs.pop('flow_id', None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/meta')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['toolName'] = _SERIALIZER.query("tool_name", tool_name, 'str')
query_parameters['toolType'] = _SERIALIZER.query("tool_type", tool_type, 'str')
if endpoint_name is not None:
query_parameters['endpointName'] = _SERIALIZER.query("endpoint_name", endpoint_name, 'str')
if flow_runtime_name is not None:
query_parameters['flowRuntimeName'] = _SERIALIZER.query("flow_runtime_name", flow_runtime_name, 'str')
if flow_id is not None:
query_parameters['flowId'] = _SERIALIZER.query("flow_id", flow_id, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="POST",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_get_tool_meta_v2_request(
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
content_type = kwargs.pop('content_type', None) # type: Optional[str]
flow_runtime_name = kwargs.pop('flow_runtime_name', None) # type: Optional[str]
flow_id = kwargs.pop('flow_id', None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/meta-v2')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if flow_runtime_name is not None:
query_parameters['flowRuntimeName'] = _SERIALIZER.query("flow_runtime_name", flow_runtime_name, 'str')
if flow_id is not None:
query_parameters['flowId'] = _SERIALIZER.query("flow_id", flow_id, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="POST",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_get_package_tools_request(
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
flow_runtime_name = kwargs.pop('flow_runtime_name', None) # type: Optional[str]
flow_id = kwargs.pop('flow_id', None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/packageTools')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if flow_runtime_name is not None:
query_parameters['flowRuntimeName'] = _SERIALIZER.query("flow_runtime_name", flow_runtime_name, 'str')
if flow_id is not None:
query_parameters['flowId'] = _SERIALIZER.query("flow_id", flow_id, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_get_dynamic_list_request(
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
content_type = kwargs.pop('content_type', None) # type: Optional[str]
flow_runtime_name = kwargs.pop('flow_runtime_name', None) # type: Optional[str]
flow_id = kwargs.pop('flow_id', None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/dynamicList')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if flow_runtime_name is not None:
query_parameters['flowRuntimeName'] = _SERIALIZER.query("flow_runtime_name", flow_runtime_name, 'str')
if flow_id is not None:
query_parameters['flowId'] = _SERIALIZER.query("flow_id", flow_id, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="POST",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_retrieve_tool_func_result_request(
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
content_type = kwargs.pop('content_type', None) # type: Optional[str]
flow_runtime_name = kwargs.pop('flow_runtime_name', None) # type: Optional[str]
flow_id = kwargs.pop('flow_id', None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/RetrieveToolFuncResult')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if flow_runtime_name is not None:
query_parameters['flowRuntimeName'] = _SERIALIZER.query("flow_runtime_name", flow_runtime_name, 'str')
if flow_id is not None:
query_parameters['flowId'] = _SERIALIZER.query("flow_id", flow_id, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="POST",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
# fmt: on
class ToolsOperations(object):
"""ToolsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~flow.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def get_tool_setting(
self,
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.ToolSetting"
"""get_tool_setting.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ToolSetting, or the result of cls(response)
:rtype: ~flow.models.ToolSetting
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ToolSetting"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_tool_setting_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
template_url=self.get_tool_setting.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('ToolSetting', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_tool_setting.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/setting'} # type: ignore
@distributed_trace
def get_samples(
self,
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Dict[str, "_models.Tool"]
"""get_samples.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: dict mapping str to Tool, or the result of cls(response)
:rtype: dict[str, ~flow.models.Tool]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[Dict[str, "_models.Tool"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_samples_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
template_url=self.get_samples.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('{Tool}', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_samples.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/samples'} # type: ignore
@distributed_trace
def get_tool_meta(
self,
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
tool_name, # type: str
tool_type, # type: str
endpoint_name=None, # type: Optional[str]
flow_runtime_name=None, # type: Optional[str]
flow_id=None, # type: Optional[str]
data=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> str
"""get_tool_meta.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param tool_name:
:type tool_name: str
:param tool_type:
:type tool_type: str
:param endpoint_name:
:type endpoint_name: str
:param flow_runtime_name:
:type flow_runtime_name: str
:param flow_id:
:type flow_id: str
:param data:
:type data: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: str, or the result of cls(response)
:rtype: str
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[str]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "text/plain") # type: Optional[str]
_content = data
request = build_get_tool_meta_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
content_type=content_type,
tool_name=tool_name,
tool_type=tool_type,
content=_content,
endpoint_name=endpoint_name,
flow_runtime_name=flow_runtime_name,
flow_id=flow_id,
template_url=self.get_tool_meta.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_tool_meta.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/meta'} # type: ignore
@distributed_trace
def get_tool_meta_v2(
self,
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
flow_runtime_name=None, # type: Optional[str]
flow_id=None, # type: Optional[str]
body=None, # type: Optional["_models.GenerateToolMetaRequest"]
**kwargs # type: Any
):
# type: (...) -> "_models.ToolMetaDto"
"""get_tool_meta_v2.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param flow_runtime_name:
:type flow_runtime_name: str
:param flow_id:
:type flow_id: str
:param body:
:type body: ~flow.models.GenerateToolMetaRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ToolMetaDto, or the result of cls(response)
:rtype: ~flow.models.ToolMetaDto
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ToolMetaDto"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
if body is not None:
_json = self._serialize.body(body, 'GenerateToolMetaRequest')
else:
_json = None
request = build_get_tool_meta_v2_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
content_type=content_type,
json=_json,
flow_runtime_name=flow_runtime_name,
flow_id=flow_id,
template_url=self.get_tool_meta_v2.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('ToolMetaDto', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_tool_meta_v2.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/meta-v2'} # type: ignore
@distributed_trace
def get_package_tools(
self,
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
flow_runtime_name=None, # type: Optional[str]
flow_id=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> Dict[str, "_models.Tool"]
"""get_package_tools.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param flow_runtime_name:
:type flow_runtime_name: str
:param flow_id:
:type flow_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: dict mapping str to Tool, or the result of cls(response)
:rtype: dict[str, ~flow.models.Tool]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[Dict[str, "_models.Tool"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_package_tools_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
flow_runtime_name=flow_runtime_name,
flow_id=flow_id,
template_url=self.get_package_tools.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('{Tool}', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_package_tools.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/packageTools'} # type: ignore
@distributed_trace
def get_dynamic_list(
self,
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
flow_runtime_name=None, # type: Optional[str]
flow_id=None, # type: Optional[str]
body=None, # type: Optional["_models.GetDynamicListRequest"]
**kwargs # type: Any
):
# type: (...) -> List[Any]
"""get_dynamic_list.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param flow_runtime_name:
:type flow_runtime_name: str
:param flow_id:
:type flow_id: str
:param body:
:type body: ~flow.models.GetDynamicListRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of any, or the result of cls(response)
:rtype: list[any]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List[Any]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
if body is not None:
_json = self._serialize.body(body, 'GetDynamicListRequest')
else:
_json = None
request = build_get_dynamic_list_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
content_type=content_type,
json=_json,
flow_runtime_name=flow_runtime_name,
flow_id=flow_id,
template_url=self.get_dynamic_list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('[object]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_dynamic_list.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/dynamicList'} # type: ignore
@distributed_trace
def retrieve_tool_func_result(
self,
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
flow_runtime_name=None, # type: Optional[str]
flow_id=None, # type: Optional[str]
body=None, # type: Optional["_models.RetrieveToolFuncResultRequest"]
**kwargs # type: Any
):
# type: (...) -> "_models.ToolFuncResponse"
"""retrieve_tool_func_result.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param flow_runtime_name:
:type flow_runtime_name: str
:param flow_id:
:type flow_id: str
:param body:
:type body: ~flow.models.RetrieveToolFuncResultRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ToolFuncResponse, or the result of cls(response)
:rtype: ~flow.models.ToolFuncResponse
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ToolFuncResponse"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
if body is not None:
_json = self._serialize.body(body, 'RetrieveToolFuncResultRequest')
else:
_json = None
request = build_retrieve_tool_func_result_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
content_type=content_type,
json=_json,
flow_runtime_name=flow_runtime_name,
flow_id=flow_id,
template_url=self.retrieve_tool_func_result.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('ToolFuncResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
retrieve_tool_func_result.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/RetrieveToolFuncResult'} # type: ignore
| promptflow/src/promptflow/promptflow/azure/_restclient/flow/operations/_tools_operations.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/azure/_restclient/flow/operations/_tools_operations.py",
"repo_id": "promptflow",
"token_count": 13696
} | 22 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import os
from collections import defaultdict
from functools import cached_property
from multiprocessing import Lock
from pathlib import Path
from typing import Any, Dict, Optional
from azure.ai.ml._artifacts._fileshare_storage_helper import FileStorageClient
from azure.ai.ml._utils._asset_utils import (
DirectoryUploadProgressBar,
FileUploadProgressBar,
IgnoreFile,
get_directory_size,
)
from azure.core.exceptions import ResourceExistsError
from azure.storage.fileshare import DirectoryProperties, ShareDirectoryClient
from promptflow._sdk._vendor import get_upload_files_from_folder
from promptflow.azure._constants._flow import PROMPTFLOW_FILE_SHARE_DIR
from promptflow.azure._utils.gerneral import get_user_alias_from_credential
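# Process-wide locks keyed by destination path, used to serialize concurrent uploads to the same target.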
uploading_lock = defaultdict(Lock)
class FlowFileStorageClient(FileStorageClient):
def __init__(self, credential: str, file_share_name: str, account_url: str, azure_cred):
super().__init__(credential=credential, file_share_name=file_share_name, account_url=account_url)
try:
user_alias = get_user_alias_from_credential(azure_cred)
except Exception:
# Fall back to an unknown user when the alias cannot be resolved from the credential.
user_alias = "unknown_user"
self._user_alias = user_alias
# TODO: update this after we finalize the design for flow file storage client
# Create the user folder hierarchy if it does not exist.
for directory_path in ["Users", f"Users/{user_alias}", f"Users/{user_alias}/{PROMPTFLOW_FILE_SHARE_DIR}"]:
self.directory_client = ShareDirectoryClient(
account_url=account_url,
credential=credential,
share_name=file_share_name,
directory_path=directory_path,
)
# Try to create the directory; ignore the error if it already exists.
try:
self.directory_client.create_directory()
except ResourceExistsError:
pass
@cached_property
def file_share_prefix(self) -> str:
return f"Users/{self._user_alias}/{PROMPTFLOW_FILE_SHARE_DIR}"
def upload(
self,
source: str,
name: str,
version: str,
ignore_file: IgnoreFile = IgnoreFile(None),
asset_hash: Optional[str] = None,
show_progress: bool = True,
) -> Dict[str, str]:
"""Upload a file or directory to a path inside the file system."""
source_name = Path(source).name
dest = asset_hash
# truncate path longer than 50 chars for terminal display
if show_progress and len(source_name) >= 50:
formatted_path = "{:.47}".format(source_name) + "..."
else:
formatted_path = source_name
msg = f"Uploading {formatted_path}"
# lock to prevent concurrent uploading of the same file or directory
with uploading_lock[self.directory_client.directory_path + "/" + dest]:
# start upload
if os.path.isdir(source):
subdir = self.directory_client.get_subdirectory_client(dest)
if not subdir.exists():
# directory is uploaded based on asset hash for now, so skip uploading if subdir exists
self.upload_dir(
source,
dest,
msg=msg,
show_progress=show_progress,
ignore_file=ignore_file,
)
else:
self.upload_file(source, dest=dest, msg=msg, show_progress=show_progress)
artifact_info = {"remote path": dest, "name": name, "version": version}
return artifact_info
def upload_file(
self,
source: str,
dest: str,
show_progress: Optional[bool] = None,
msg: Optional[str] = None,
in_directory: bool = False,
subdirectory_client: Optional[ShareDirectoryClient] = None,
callback: Optional[Any] = None,
) -> None:
""" " Upload a single file to a path inside the file system
directory."""
validate_content = os.stat(source).st_size > 0 # don't do checksum for empty files
# relative path from root
relative_path = Path(subdirectory_client.directory_path).relative_to(self.directory_client.directory_path)
dest = Path(dest).relative_to(relative_path).as_posix()
if "/" in dest:
# dest is a folder, need to switch subdirectory client
dest_dir, dest = dest.rsplit("/", 1)
subdirectory_client = subdirectory_client.get_subdirectory_client(dest_dir)
with open(source, "rb") as data:
if in_directory:
file_name = dest.rsplit("/")[-1]
if show_progress:
subdirectory_client.upload_file(
file_name=file_name,
data=data,
validate_content=validate_content,
raw_response_hook=callback,
)
else:
subdirectory_client.upload_file(
file_name=file_name,
data=data,
validate_content=validate_content,
)
else:
if show_progress:
with FileUploadProgressBar(msg=msg) as progress_bar:
self.directory_client.upload_file(
file_name=dest,
data=data,
validate_content=validate_content,
raw_response_hook=progress_bar.update_to,
)
else:
self.directory_client.upload_file(file_name=dest, data=data, validate_content=validate_content)
self.uploaded_file_count = self.uploaded_file_count + 1
def upload_dir(
self,
source: str,
dest: str,
msg: str,
show_progress: bool,
ignore_file: IgnoreFile,
) -> None:
"""Upload a directory to a path inside the fileshare directory."""
subdir = self.directory_client.create_subdirectory(dest)
source_path = Path(source).resolve()
prefix = dest + "/"
upload_paths = get_upload_files_from_folder(
path=source_path,
prefix=prefix,
ignore_file=ignore_file,
)
upload_paths = sorted(upload_paths)
self.total_file_count = len(upload_paths)
# traverse all directories recursively and create them in the file share
def traverse_recursively(child_dir, source_dir):
    for item in os.listdir(source_dir):
        item_path = os.path.join(source_dir, item)
        if os.path.isdir(item_path):
            new_dir = child_dir.create_subdirectory(item)
            traverse_recursively(new_dir, item_path)
traverse_recursively(child_dir=subdir, source_dir=source)
if show_progress:
with DirectoryUploadProgressBar(dir_size=get_directory_size(source_path), msg=msg) as progress_bar:
for src, destination in upload_paths:
self.upload_file(
src,
destination,
in_directory=True,
subdirectory_client=subdir,
show_progress=show_progress,
callback=progress_bar.update_to,
)
else:
for src, destination in upload_paths:
self.upload_file(
src,
destination,
in_directory=True,
subdirectory_client=subdir,
show_progress=show_progress,
)
def _check_file_share_directory_exist(self, dest) -> bool:
"""Check if the file share directory exists."""
return self.directory_client.get_subdirectory_client(dest).exists()
def _check_file_share_file_exist(self, dest) -> bool:
"""Check if the file share directory exists."""
if dest.startswith(self.file_share_prefix):
dest = dest.replace(f"{self.file_share_prefix}/", "")
file_client = self.directory_client.get_file_client(dest)
try:
file_client.get_file_properties()
except Exception:
return False
return True
def _delete_file_share_directory(self, dir_client) -> None:
"""Recursively delete a directory with content in the file share."""
for item in dir_client.list_directories_and_files():
if isinstance(item, DirectoryProperties):
self._delete_file_share_directory(dir_client.get_subdirectory_client(item.name))
else:
dir_client.delete_file(item.name)
dir_client.delete_directory()
| promptflow/src/promptflow/promptflow/azure/operations/_fileshare_storeage_helper.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/azure/operations/_fileshare_storeage_helper.py",
"repo_id": "promptflow",
"token_count": 4252
} | 23 |
from promptflow.exceptions import UserErrorException
class FailedToImportModule(UserErrorException):
pass
class FlowDefinitionError(UserErrorException):
pass
| promptflow/src/promptflow/promptflow/contracts/_errors.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/contracts/_errors.py",
"repo_id": "promptflow",
"token_count": 44
} | 24 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from jinja2 import TemplateSyntaxError
from promptflow._utils.exception_utils import ExceptionPresenter, infer_error_code_from_class, remove_suffix
from promptflow.exceptions import (
ErrorTarget,
PromptflowException,
SystemErrorException,
UserErrorException,
ValidationException,
)
class InvalidCustomLLMTool(ValidationException):
"""Exception raised when package tool definition is wrong."""
pass
class ValueTypeUnresolved(ValidationException):
pass
class ToolValidationError(ValidationException):
def __init__(
self,
target: ErrorTarget = ErrorTarget.EXECUTOR,
**kwargs,
):
super().__init__(
target=target,
**kwargs,
)
class InvalidRequest(ValidationException):
def __init__(
self,
target: ErrorTarget = ErrorTarget.EXECUTOR,
**kwargs,
):
super().__init__(
target=target,
**kwargs,
)
class ConnectionNotFound(InvalidRequest):
pass
class InvalidBulkTestRequest(ValidationException):
def __init__(
self,
target: ErrorTarget = ErrorTarget.EXECUTOR,
**kwargs,
):
super().__init__(
target=target,
**kwargs,
)
class InvalidFlowRequest(ValidationException):
def __init__(
self,
target: ErrorTarget = ErrorTarget.EXECUTOR,
**kwargs,
):
super().__init__(
target=target,
**kwargs,
)
class NodeInputValidationError(InvalidFlowRequest):
pass
class DuplicateNodeName(InvalidFlowRequest):
pass
class EmptyOutputReference(InvalidFlowRequest):
pass
class OutputReferenceNotFound(InvalidFlowRequest):
pass
class InputReferenceNotFound(InvalidFlowRequest):
pass
class InputNotFound(InvalidFlowRequest):
pass
class InvalidAggregationInput(SystemErrorException):
pass
class InputNotFoundFromAncestorNodeOutput(SystemErrorException):
pass
class NoNodeExecutedError(SystemErrorException):
pass
class InputTypeError(InvalidFlowRequest):
pass
class InputParseError(InvalidFlowRequest):
pass
class InvalidConnectionType(InvalidFlowRequest):
pass
class NodeReferenceNotFound(InvalidFlowRequest):
pass
class NodeCircularDependency(InvalidFlowRequest):
pass
class InvalidNodeReference(InvalidFlowRequest):
pass
class NodeReferenceError(UserErrorException):
"""Exception raised when node reference not found or unsupported"""
pass
class UnsupportedReference(NodeReferenceError):
pass
class InvalidReferenceProperty(NodeReferenceError):
pass
class OutputReferenceNotExist(NodeReferenceError):
pass
class NodeOutputNotFound(UserErrorException):
pass
class SingleNodeValidationError(UserErrorException):
pass
class LineExecutionTimeoutError(UserErrorException):
"""Exception raised when single line execution timeout"""
def __init__(self, line_number, timeout):
super().__init__(
message_format="Line {line_number} execution timeout for exceeding {timeout} seconds",
line_number=line_number,
timeout=timeout,
target=ErrorTarget.EXECUTOR,
)
class BatchExecutionTimeoutError(UserErrorException):
"""Exception raised when batch timeout is exceeded"""
def __init__(self, line_number, timeout):
super().__init__(
message_format=(
"Line {line_number} execution terminated due to the "
"total batch run exceeding the batch timeout ({timeout}s)."
),
line_number=line_number,
timeout=timeout,
target=ErrorTarget.BATCH,
)
class ProcessCrashError(UserErrorException):
"""Exception raised when process crashed."""
def __init__(self, line_number):
super().__init__(message=f"Process crashed while executing line {line_number},", target=ErrorTarget.EXECUTOR)
class ProcessTerminatedTimeout(SystemErrorException):
"""Exception raised when process not terminated within a period of time."""
def __init__(self, timeout):
super().__init__(message=f"Process has not terminated after {timeout} seconds", target=ErrorTarget.EXECUTOR)
class ProcessInfoObtainedTimeout(SystemErrorException):
"""Exception raised when process info not obtained within a period of time."""
def __init__(self, timeout):
super().__init__(message=f"Failed to get process info after {timeout} seconds", target=ErrorTarget.EXECUTOR)
class SpawnedForkProcessManagerStartFailure(SystemErrorException):
"""Exception raised when failed to start spawned fork process manager."""
def __init__(self):
super().__init__(message="Failed to start spawned fork process manager", target=ErrorTarget.EXECUTOR)
class EmptyLLMApiMapping(UserErrorException):
"""Exception raised when connection_type_to_api_mapping is empty and llm node provider can't be inferred"""
def __init__(self):
super().__init__(
message="LLM api mapping is empty, please ensure 'promptflow-tools' package has been installed.",
target=ErrorTarget.EXECUTOR,
)
class ResolveToolError(PromptflowException):
"""Exception raised when tool load failed.
It is used to append the name of the failed node to the error message to improve the user experience.
It simply wraps the error thrown by the Resolve Tool phase.
It has the same additional_info and error_codes as inner error.
"""
def __init__(self, *, node_name: str, target: ErrorTarget = ErrorTarget.EXECUTOR, module: str = None):
self._node_name = node_name
super().__init__(target=target, module=module)
@property
def message(self):
if self.inner_exception:
error_type_and_message = f"({self.inner_exception.__class__.__name__}) {self.inner_exception}"
if isinstance(self.inner_exception, TemplateSyntaxError):
error_type_and_message = (
f"Jinja parsing failed at line {self.inner_exception.lineno}: {error_type_and_message}"
)
return remove_suffix(self._message, ".") + f": {error_type_and_message}"
return self._message
@property
def message_format(self):
return "Tool load failed in '{node_name}'."
@property
def message_parameters(self):
return {"node_name": self._node_name}
@property
def additional_info(self):
"""Get additional info from innererror when the innererror is PromptflowException"""
if isinstance(self.inner_exception, PromptflowException):
return self.inner_exception.additional_info
return None
@property
def error_codes(self):
"""The hierarchy of the error codes.
We follow the "Microsoft REST API Guidelines" to define error codes in a hierarchy style.
See the below link for details:
https://github.com/microsoft/api-guidelines/blob/vNext/Guidelines.md#7102-error-condition-responses
Because ResolveToolError has no classification of its own,
its error_codes respect the inner error.
"""
if self.inner_exception:
return ExceptionPresenter.create(self.inner_exception).error_codes
return [infer_error_code_from_class(SystemErrorException), self.__class__.__name__]
class UnsupportedAssistantToolType(ValidationException):
pass
| promptflow/src/promptflow/promptflow/executor/_errors.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/executor/_errors.py",
"repo_id": "promptflow",
"token_count": 2722
} | 25 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import json
import os
from promptflow._core.connection_manager import ConnectionManager
from promptflow._utils.exception_utils import ErrorResponse, ExceptionPresenter, JsonSerializedPromptflowException
from promptflow._utils.logger_utils import LogContext, logger
from promptflow.executor._service.contracts.execution_request import BaseExecutionRequest
def get_log_context(request: BaseExecutionRequest):
run_mode = request.get_run_mode()
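# Collect connection secrets so that LogContext can scrub them from executor logs.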
credential_list = ConnectionManager(request.connections).get_secret_list()
return LogContext(file_path=request.log_path, run_mode=run_mode, credential_list=credential_list)
def generate_error_response(ex):
if isinstance(ex, JsonSerializedPromptflowException):
error_dict = json.loads(ex.message)
else:
error_dict = ExceptionPresenter.create(ex).to_dict(include_debug_info=True)
logger.error(f"Failed to execute the flow: \n{ex}")
return ErrorResponse.from_error_dict(error_dict)
def set_environment_variables(request: BaseExecutionRequest):
if isinstance(request.environment_variables, dict) and request.environment_variables:
os.environ.update(request.environment_variables)
| promptflow/src/promptflow/promptflow/executor/_service/utils/service_utils.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/executor/_service/utils/service_utils.py",
"repo_id": "promptflow",
"token_count": 392
} | 26 |
# Use flow in Azure ML pipeline job
After you have developed and tested the flow in [init and test a flow](../../how-to-guides/init-and-test-a-flow.md), this guide shows how to use a flow as a parallel component in a pipeline job on AzureML, so that you can integrate the created flow with existing pipelines and process large amounts of data.
:::{admonition} Pre-requirements
- Customers need to install the extension `ml>=2.21.0` to enable this feature in the CLI, and the package `azure-ai-ml>=1.11.0` to enable it in the SDK;
- Customers need to put `$schema` in the target `flow.dag.yaml` to enable this feature;
  - `flow.dag.yaml`: `$schema`: `https://azuremlschemas.azureedge.net/promptflow/latest/Flow.schema.json`
  - `run.yaml`: `$schema`: `https://azuremlschemas.azureedge.net/promptflow/latest/Run.schema.json`
- Customers need to generate `flow.tools.json` for the target flow before the usage below. The generation can be done by `pf flow validate`; see the sketch after this note.
:::
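For illustration, here is a minimal sketch of generating `flow.tools.json` from Python by shelling out to the CLI; it assumes the `pf` CLI is available on `PATH`, and the flow folder path is a placeholder:

```python
import subprocess

# Hypothetical flow folder; replace with your own.
flow_dir = "standard/web-classification"

# `pf flow validate` validates the flow and generates flow.tools.json as a side effect.
subprocess.run(["pf", "flow", "validate", "--source", flow_dir], check=True)
```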
For more information about AzureML and component:
- [Install and set up the CLI(v2)](https://learn.microsoft.com/en-us/azure/machine-learning/how-to-configure-cli?view=azureml-api-2&tabs=public)
- [Install and set up the SDK(v2)](https://learn.microsoft.com/en-us/python/api/overview/azure/ai-ml-readme?view=azure-python)
- [What is a pipeline](https://learn.microsoft.com/en-us/azure/machine-learning/concept-ml-pipelines?view=azureml-api-2)
- [What is a component](https://learn.microsoft.com/en-us/azure/machine-learning/concept-component?view=azureml-api-2)
## Register a flow as a component
Customers can register a flow as a component with either the CLI or the SDK.
::::{tab-set}
:::{tab-item} CLI
:sync: CLI
```bash
# Register flow as a component
# The default component name is the flow folder name, which is not a valid component name, so we override it here; the default version is "1"
az ml component create --file standard/web-classification/flow.dag.yaml --set name=web_classification
# Register flow as a component with parameters override
az ml component create --file standard/web-classification/flow.dag.yaml --version 2 --set name=web_classification_updated
```
:::
:::{tab-item} SDK
:sync: SDK
```python
from azure.ai.ml import MLClient, load_component
ml_client = MLClient()
# Register flow as a component
flow_component = load_component("standard/web-classification/flow.dag.yaml")
# The default component name is the flow folder name, which is not a valid component name, so we override it here; the default version is "1"
flow_component.name = "web_classification"
ml_client.components.create_or_update(flow_component)
# Register flow as a component with parameters override
ml_client.components.create_or_update(
"standard/web-classification/flow.dag.yaml",
version="2",
params_override=[
{"name": "web_classification_updated"}
]
)
```
:::
::::
After a flow is registered as a component, it can be referenced in a pipeline job like [regular registered components](https://github.com/Azure/azureml-examples/tree/main/cli/jobs/pipelines-with-components/basics/1b_e2e_registered_components); a minimal sketch follows.
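The sketch below is illustrative only; the component name/version and the compute cluster name are assumptions and should be replaced with your own values:

```python
from azure.ai.ml import MLClient, Input
from azure.ai.ml.dsl import pipeline
from azure.identity import DefaultAzureCredential

ml_client = MLClient.from_config(credential=DefaultAzureCredential())

# Fetch the component registered above; the name and version here are assumptions.
web_classification = ml_client.components.get(name="web_classification", version="1")

@pipeline
def pipeline_with_registered_flow(data):
    flow_node = web_classification(data=data, url="${data.url}")
    flow_node.compute = "cpu-cluster"  # hypothetical compute cluster name

pipeline_job = pipeline_with_registered_flow(
    data=Input(path="standard/web-classification/data.jsonl", type="uri_file")
)
ml_client.jobs.create_or_update(pipeline_job)
```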
## Directly use a flow in a pipeline job
Besides explicitly registering a flow as a component, customers can also use a flow directly in a pipeline job:
All connections and flow inputs will be exposed as input parameters of the component. Default values can be provided in the flow/run definition; they can also be set or overwritten on job submission:
::::{tab-set}
:::{tab-item} CLI
:sync: CLI
```yaml
...
jobs:
flow_node:
type: parallel
component: standard/web-classification/flow.dag.yaml
inputs:
data: ${{parent.inputs.web_classification_input}}
url: "${data.url}"
connections.summarize_text_content.connection: azure_open_ai_connection
connections.summarize_text_content.deployment_name: text-davinci-003
...
```
The above is part of the pipeline job YAML; see the [full example](https://github.com/Azure/azureml-examples/tree/main/cli/jobs/pipelines-with-components/pipeline_job_with_flow_as_component) for details.
:::
:::{tab-item} SDK
:sync: SDK
```python
from azure.identity import DefaultAzureCredential
from azure.ai.ml import MLClient, load_component, Input
from azure.ai.ml.dsl import pipeline
credential = DefaultAzureCredential()
ml_client = MLClient.from_config(credential=credential)
data_input = Input(path="standard/web-classification/data.jsonl", type='uri_file')
# Load flow as a component
flow_component = load_component("standard/web-classification/flow.dag.yaml")
@pipeline
def pipeline_func_with_flow(data):
flow_node = flow_component(
data=data,
url="${data.url}",
connections={
"summarize_text_content": {
"connection": "azure_open_ai_connection",
"deployment_name": "text-davinci-003",
},
},
)
flow_node.compute = "cpu-cluster"
pipeline_with_flow = pipeline_func_with_flow(data=data_input)
pipeline_job = ml_client.jobs.create_or_update(pipeline_with_flow)
ml_client.jobs.stream(pipeline_job.name)
```
The above is part of the pipeline job Python code; see the [full example](https://github.com/Azure/azureml-examples/tree/main/sdk/python/jobs/pipelines/1l_flow_in_pipeline) for details.
:::
::::
## Difference across flow in prompt flow and pipeline job
In prompt flow, a flow runs on a [runtime](https://learn.microsoft.com/en-us/azure/machine-learning/prompt-flow/concept-runtime), which is designed for prompt flow and easy to customize; in a pipeline job, a flow runs on various compute types, usually a compute cluster.
Given the above, if your flow has logic relying on identity or environment variables, be aware of this difference: you might run into unexpected errors when the flow runs in a pipeline job, and extra configuration may be needed to make it work.
| promptflow/docs/cloud/azureai/use-flow-in-azure-ml-pipeline.md/0 | {
"file_path": "promptflow/docs/cloud/azureai/use-flow-in-azure-ml-pipeline.md",
"repo_id": "promptflow",
"token_count": 1931
} | 0 |
# Deploy a flow
A flow can be deployed to multiple platforms, such as a local development service, Docker container, Kubernetes cluster, etc.
```{gallery-grid}
:grid-columns: 1 2 2 3
- image: ../../media/how-to-guides/local.png
content: "<center><b>Development server</b></center>"
website: deploy-using-dev-server.html
- image: ../../media/how-to-guides/docker.png
content: "<center><b>Docker</b></center>"
website: deploy-using-docker.html
- image: ../../media/how-to-guides/kubernetes.png
content: "<center><b>Kubernetes</b></center>"
website: deploy-using-kubernetes.html
```
We also provide guides to deploy to cloud, such as azure app service:
```{gallery-grid}
:grid-columns: 1 2 2 3
- image: ../../media/how-to-guides/appservice.png
content: "<center><b>Azure App Service</b></center>"
website: ../../cloud/azureai/deploy-to-azure-appservice.html
```
We are working on more official deployment guides for other hosting providers, and we welcome user-submitted guides.
```{toctree}
:maxdepth: 1
:hidden:
deploy-using-dev-server
deploy-using-docker
deploy-using-kubernetes
distribute-flow-as-executable-app
``` | promptflow/docs/how-to-guides/deploy-a-flow/index.md/0 | {
"file_path": "promptflow/docs/how-to-guides/deploy-a-flow/index.md",
"repo_id": "promptflow",
"token_count": 397
} | 1 |
# Execute flow as a function
:::{admonition} Experimental feature
This is an experimental feature, and may change at any time. Learn [more](faq.md#stable-vs-experimental).
:::
## Overview
Promptflow allows you to load a flow and use it as a function in your code.
This feature is useful when building a service on top of a flow; see [this simple example service](https://github.com/microsoft/promptflow/tree/main/examples/tutorials/flow-deploy/create-service-with-flow) for an example of flow function consumption.
## Load and invoke the flow function
To use the flow-as-function feature, you first need to load a flow using the `load_flow` function.
Then you can call the flow object like a function by providing keyword arguments.
```python
from promptflow import load_flow

f = load_flow("../../examples/flows/standard/web-classification/")
f(url="sample_url")
```
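The call returns the flow outputs as a dictionary keyed by output name; which keys are present depends on your flow definition.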
## Config the flow with context
You can overwrite some flow configs before flow function execution by setting `flow.context`.
### Load flow as a function with in-memory connection override
By providing a connection object to the flow context, the flow won't need to resolve the connection at execution time, which can save time in cases where the flow function needs to be called multiple times.
```python
from promptflow.entities import AzureOpenAIConnection, FlowContext
connection_obj = AzureOpenAIConnection(
name=conn_name,
api_key=api_key,
api_base=api_base,
api_type="azure",
api_version=api_version,
)
# No need to create the connection in the local connection store; the in-memory object above is passed directly.
f.context = FlowContext(
connections={"classify_with_llm": {"connection": connection_obj}}
)
```
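With the connection supplied in the context, subsequent calls such as `f(url="sample_url")` reuse the in-memory connection object instead of resolving the connection by name on every invocation.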
### Load flow as a function with flow inputs override
By providing overrides, the original flow dag will be updated at execution time.
```python
f.context = FlowContext(
# node "fetch_text_content_from_url" will take inputs from the following command instead of from flow input
overrides={"nodes.fetch_text_content_from_url.inputs.url": sample_url},
)
```
**Note**: `overrides` only performs YAML content replacement on the original `flow.dag.yaml`.
If `flow.dag.yaml` becomes invalid after `overrides`, a validation error will be raised at execution time.
### Load flow as a function with streaming output
After setting `streaming` in the flow context, the flow function will return an iterator to stream the output.
```python
f = load_flow(source="../../examples/flows/chat/basic-chat/")
f.context.streaming = True
result = f(
chat_history=[
{
"inputs": {"chat_input": "Hi"},
"outputs": {"chat_output": "Hello! How can I assist you today?"},
}
],
question="How are you?",
)
answer = ""
# the result will be a generator, iterate it to get the result
for r in result["answer"]:
answer += r
```
See our [sample](https://github.com/microsoft/promptflow/blob/main/examples/tutorials/get-started/flow-as-function.ipynb) for usage.
## Flow with multiple overrides
**Note**: flow context configs may affect each other in some cases, for example when using `connections` and `overrides` to target the same node.
The behavior is undefined in those scenarios; please avoid such usage.
```python
# Overriding `classify_with_llm`'s connection and inputs at the same time will lead to undefined behavior.
f.context = FlowContext(
connections={"classify_with_llm": {"connection": connection_obj}},
overrides={"nodes.classify_with_llm.inputs.url": sample_url}
)
```
## Next steps
Learn more about:
- [Flow as a function sample](https://github.com/microsoft/promptflow/blob/main/examples/tutorials/get-started/flow-as-function.ipynb)
- [Deploy a flow](./deploy-a-flow/index.md)
| promptflow/docs/how-to-guides/execute-flow-as-a-function.md/0 | {
"file_path": "promptflow/docs/how-to-guides/execute-flow-as-a-function.md",
"repo_id": "promptflow",
"token_count": 1125
} | 2 |
# Custom Tools
This section contains documentation for custom tools created by the community to extend Prompt flow's capabilities for specific use cases. These tools are developed following the guide on [Creating and Using Tool Packages](../../how-to-guides/develop-a-tool/create-and-use-tool-package.md). They are not officially maintained or endorsed by the Prompt flow team. For questions or issues when using a tool, please use the support contact link in the table below.
## Tool Package Index
The table below provides an index of custom tool packages. The columns contain:
- **Package Name:** The name of the tool package. Links to the package documentation.
- **Description:** A short summary of what the tool package does.
- **Owner:** The creator/maintainer of the tool package.
- **Support Contact:** Link to contact for support and reporting new issues.
| Package Name | Description | Owner | Support Contact |
|-|-|-|-|
| promptflow-azure-ai-language | Collection of Azure AI Language Prompt flow tools. | Sean Murray | [email protected] |
```{toctree}
:maxdepth: 1
:hidden:
azure-ai-language-tool
```
| promptflow/docs/integrations/tools/index.md/0 | {
"file_path": "promptflow/docs/integrations/tools/index.md",
"repo_id": "promptflow",
"token_count": 287
} | 3 |
# Vector DB Lookup
Vector DB Lookup is a vector search tool that allows users to search the top-k most similar vectors in a vector database. This tool is a wrapper around multiple third-party vector databases. The currently supported databases are listed below.
| Name | Description |
| --- | --- |
| Azure Cognitive Search | Microsoft's cloud search service with built-in AI capabilities that enrich all types of information to help identify and explore relevant content at scale. |
| Qdrant | Qdrant is a vector similarity search engine that provides a production-ready service with a convenient API to store, search and manage points (i.e. vectors) with an additional payload. |
| Weaviate | Weaviate is an open source vector database that stores both objects and vectors. This allows for combining vector search with structured filtering. |
Support for more vector databases will be added over time.
## Requirements
- For AzureML users, the tool is installed in the default image, so you can use it without extra installation.
- For local users, install the package first:
  `pip install promptflow-vectordb`
## Prerequisites
The tool searches data from a third-party vector database. To use it, you should create the resources in advance and establish a connection between the tool and the resource.
- **Azure Cognitive Search:**
- Create resource [Azure Cognitive Search](https://learn.microsoft.com/en-us/azure/search/search-create-service-portal).
- Add "Cognitive search" connection. Fill "API key" field with "Primary admin key" from "Keys" section of created resource, and fill "API base" field with the URL, the URL format is `https://{your_serive_name}.search.windows.net`.
- **Qdrant:**
- Follow the [installation](https://qdrant.tech/documentation/quick-start/) to deploy Qdrant to a self-maintained cloud server.
- Add "Qdrant" connection. Fill "API base" with your self-maintained cloud server address and fill "API key" field.
- **Weaviate:**
- Follow the [installation](https://weaviate.io/developers/weaviate/installation) to deploy Weaviate to a self-maintained instance.
- Add "Weaviate" connection. Fill "API base" with your self-maintained instance address and fill "API key" field.
## Inputs
The tool accepts the following inputs:
- **Azure Cognitive Search:**
| Name | Type | Description | Required |
| ---- | ---- | ----------- | -------- |
| connection | CognitiveSearchConnection | The created connection for accessing the Cognitive Search endpoint. | Yes |
| index_name | string | The index name created in the Cognitive Search resource. | Yes |
| text_field | string | The text field name. The returned text field will populate the text of output. | No |
| vector_field | string | The vector field name. The target vector is searched in this vector field. | Yes |
| search_params | dict | The search parameters. It's key-value pairs. Except for parameters in the tool input list mentioned above, additional search parameters can be formed into a JSON object as search_params. For example, use `{"select": ""}` as search_params to select the returned fields, use `{"search": ""}` to perform a [hybrid search](https://learn.microsoft.com/en-us/azure/search/search-get-started-vector#hybrid-search). | No |
| search_filters | dict | The search filters. It's key-value pairs, the input format is like `{"filter": ""}` | No |
| vector | list | The target vector to be queried, which can be generated by Embedding tool. | Yes |
| top_k | int | The count of top-scored entities to return. Default value is 3. | No |
- **Qdrant:**
| Name | Type | Description | Required |
| ---- | ---- | ----------- | -------- |
| connection | QdrantConnection | The created connection for accessing the Qdrant server. | Yes |
| collection_name | string | The collection name created in the self-maintained cloud server. | Yes |
| text_field | string | The text field name. The returned text field will populate the text of output. | No |
| search_params | dict | The search parameters can be formed into a JSON object as search_params. For example, use `{"params": {"hnsw_ef": 0, "exact": false, "quantization": null}}` to set search_params. | No |
| search_filters | dict | The search filters. It's key-value pairs, the input format is like `{"filter": {"should": [{"key": "", "match": {"value": ""}}]}}` | No |
| vector | list | The target vector to be queried, which can be generated by Embedding tool. | Yes |
| top_k | int | The count of top-scored entities to return. Default value is 3. | No |
- **Weaviate:**
| Name | Type | Description | Required |
| ---- | ---- | ----------- | -------- |
| connection | WeaviateConnection | The created connection for accessing Weaviate. | Yes |
| class_name | string | The class name. | Yes |
| text_field | string | The text field name. The returned text field will populate the text of output. | No |
| vector | list | The target vector to be queried, which can be generated by Embedding tool. | Yes |
| top_k | int | The count of top-scored entities to return. Default value is 3. | No |
## Outputs
The following is an example JSON response returned by the tool, which includes the top-k scored entities. Each entity follows a generic vector search result schema provided by the promptflow-vectordb SDK.
- **Azure Cognitive Search:**
For Azure Cognitive Search, the following fields are populated:
| Field Name | Type | Description |
| ---- | ---- | ----------- |
| original_entity | dict | the original response json from search REST API|
| score | float | @search.score from the original entity, which evaluates the similarity between the entity and the query vector |
| text | string | text of the entity|
| vector | list | vector of the entity|
<details>
<summary>Output</summary>
```json
[
{
"metadata": null,
"original_entity": {
"@search.score": 0.5099789,
"id": "",
"your_text_filed_name": "sample text1",
"your_vector_filed_name": [-0.40517663431890405, 0.5856996257406859, -0.1593078462266455, -0.9776269170785785, -0.6145604369828972],
"your_additional_field_name": ""
},
"score": 0.5099789,
"text": "sample text1",
"vector": [-0.40517663431890405, 0.5856996257406859, -0.1593078462266455, -0.9776269170785785, -0.6145604369828972]
}
]
```
</details>
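Whichever database backs the lookup, every returned entity exposes the generic `text`, `score`, and `vector` fields, so downstream code can consume results uniformly. A minimal sketch, where the `results` variable is assumed to hold the list of entities returned by the tool:
```python
# Collect all returned texts and pick the highest-scoring entity.
# `results` is assumed to be the tool's output, shaped like the examples here.
retrieved_texts = [entity["text"] for entity in results]
best_match = max(results, key=lambda entity: entity["score"])
print(best_match["text"], best_match["score"])
```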
- **Qdrant:**
For Qdrant, the following fields are populated:
| Field Name | Type | Description |
| ---- | ---- | ----------- |
| original_entity | dict | the original response json from search REST API|
| metadata | dict | payload from the original entity|
| score | float | score from the original entity, which evaluates the similarity between the entity and the query vector|
| text | string | text of the payload|
| vector | list | vector of the entity|
<details>
<summary>Output</summary>
```json
[
{
"metadata": {
"text": "sample text1"
},
"original_entity": {
"id": 1,
"payload": {
"text": "sample text1"
},
"score": 1,
"vector": [0.18257418, 0.36514837, 0.5477226, 0.73029673],
"version": 0
},
"score": 1,
"text": "sample text1",
"vector": [0.18257418, 0.36514837, 0.5477226, 0.73029673]
}
]
```
</details>
- **Weaviate:**
For Weaviate, the following fields are populated:
| Field Name | Type | Description |
| ---- | ---- | ----------- |
| original_entity | dict | the original response json from search REST API|
| score | float | certainty from the original entity, which evaluates the similarity between the entity and the query vector|
| text | string | text in the original entity|
| vector | list | vector of the entity|
<details>
<summary>Output</summary>
```json
[
{
"metadata": null,
"original_entity": {
"_additional": {
"certainty": 1,
"distance": 0,
"vector": [
0.58,
0.59,
0.6,
0.61,
0.62
]
},
"text": "sample text1."
},
"score": 1,
"text": "sample text1.",
"vector": [
0.58,
0.59,
0.6,
0.61,
0.62
]
}
]
```
</details> | promptflow/docs/reference/tools-reference/vector_db_lookup_tool.md/0 | {
"file_path": "promptflow/docs/reference/tools-reference/vector_db_lookup_tool.md",
"repo_id": "promptflow",
"token_count": 2697
} | 4 |
from pathlib import Path
from ruamel.yaml import YAML
def collect_tools_from_directory(base_dir) -> dict:
tools = {}
yaml = YAML()
    for yaml_file in Path(base_dir).glob("**/*.yaml"):
        # Use a distinct name for the opened handle to avoid shadowing the path variable.
        with open(yaml_file, "r") as f:
            tools_in_file = yaml.load(f)
for identifier, tool in tools_in_file.items():
tools[identifier] = tool
return tools
def list_package_tools():
"""List package tools"""
yaml_dir = Path(__file__).parent / "yamls"
return collect_tools_from_directory(yaml_dir)
| promptflow/src/promptflow-tools/promptflow/tools/list.py/0 | {
"file_path": "promptflow/src/promptflow-tools/promptflow/tools/list.py",
"repo_id": "promptflow",
"token_count": 233
} | 5 |
# Release History
## 1.6.0 (TBD)
### Features Added
- [SDK/CLI] Support configuring environment variable to directly use `AzureCliCredential` for `pfazure` commands.
```dotenv
PF_USE_AZURE_CLI_CREDENTIAL=true
```
## 1.5.0 (2024.02.06)
### Features Added
- [SDK/CLI][azure] Support specify compute instance as session compute in run.yaml
- [SDK/CLI][azure] Stop supporting `idle_time_before_shutdown_minutes` for automatic runtime, since each session will be auto deleted after execution.
### Bugs Fixed
- [SDK/CLI] The inputs of node test allow the value of a referenced node output to be passed in directly.
- [SDK/CLI][azure] Fixed bug for cloud batch run referencing registry flow with automatic runtime.
- [SDK/CLI] Fix "Without Import Data" in run visualize page when invalid JSON value exists in metrics.
- [SDK/CLI][azure] Fix azureml serving get UAI(user assigned identity) token failure bug.
- [SDK/CLI] Fix flow as function connection override when node has default variant.
### Improvements
- [SDK/CLI] For `pf run delete` and `pf connection delete`, introduce an option to skip confirmation prompts.
- [SDK/CLI] Move pfs extra dependency to required dependency.
## 1.4.0 (2024.01.22)
### Features Added
- [Executor] Calculate system_metrics recursively in api_calls.
- [Executor] Add flow root level api_calls, so that user can overview the aggregated metrics of a flow.
- [Executor] Add @trace decorator to make it possible to log traces for functions that are called by tools.
- [Tool] InputSetting of tool supports passing undefined configuration.
- [SDK/CLI][azure] Switch automatic runtime's session provision to system wait.
- [SDK/CLI] Add `--skip-open-browser` option to `pf flow serve` to skip opening browser.
- [SDK/CLI][azure] Support submit flow to sovereign cloud.
- [SDK/CLI] Support `pf run delete` to delete a run irreversibly.
- [SDK/CLI][azure] Automatically add requirements.txt to flow.dag.yaml if it exists in the flow snapshot.
- [SDK/CLI] Support `pf upgrade` to upgrade prompt flow to the latest version.
- [SDK/CLI] Support env variables in yaml file.
### Bugs Fixed
- Fix unaligned inputs & outputs or pandas exception during get details against run in Azure.
- Fix loose flow path validation for run schema.
- Fix "Without Import Data" in run visualize page results from invalid JSON value (`-Infinity`, `Infinity` and `NaN`).
- Fix "ValueError: invalid width -1" when show-details against long column(s) in narrow terminal window.
- Fix invalid tool code generated when initializing the script tool with icon.
### Improvements
- [SDK/CLI] For `pfazure flow create`:
- If used by non-msft tenant user, use user name instead of user object id in the remote flow folder path. (e.g. `Users/<user-name>/promptflow`).
- When flow has unknown attributes, log warning instead of raising error.
- Use local flow folder name and timestamp as the azure flow file share folder name.
- [SDK/CLI] For `pf/pfazure run create`, when run has unknown attribute, log warning instead of raising error.
- Replace `pyyaml` with `ruamel.yaml` to adopt YAML 1.2 specification.
## 1.3.0 (2023.12.27)
### Features Added
- [SDK/CLI] Support `pfazure run cancel` to cancel a run on Azure AI.
- Add support to configure prompt flow home directory via environment variable `PF_HOME_DIRECTORY`.
- Please set before importing `promptflow`, otherwise it won't take effect.
- [Executor] Handle KeyboardInterrupt in flow test so that the final state is Canceled.
### Bugs Fixed
- [SDK/CLI] Fix single node run not working when consuming a sub item of an upstream node.
### Improvements
- Change `ruamel.yaml` lower bound to 0.17.10.
- [SDK/CLI] Improve `pfazure run download` to handle large run data files.
- [Executor] Exit the process when all async tools are done or exceeded timeout after cancellation.
## 1.2.0 (2023.12.14)
### Features Added
- [SDK/CLI] Support `pfazure run download` to download run data from Azure AI.
- [SDK/CLI] Support `pf run create` to create a local run record from downloaded run data.
### Bugs Fixed
- [SDK/CLI] Remove telemetry warning when running commands.
- Empty node stdout & stderr to avoid large visualize HTML.
- Hide unnecessary fields in run list for better readability.
- Fix bug that ignores timeout lines in batch run status summary.
## 1.1.1 (2023.12.1)
### Bugs Fixed
- [SDK/CLI] Fix compatibility issue with `semantic-kernel==0.4.0.dev0` and `azure-ai-ml==1.12.0`.
- [SDK/CLI] Add back workspace information in CLI telemetry.
- [SDK/CLI] Disable the feature to customize user agent in CLI to avoid changes on operation context.
- Fix openai metrics calculator to adapt openai v1.
## 1.1.0 (2023.11.30)
### Features Added
- Add `pfazure flow show/list` to show or list flows from Azure AI.
- Display node status in run visualize page graph view.
- Add support for image input and output in prompt flow.
- [SDK/CLI] SDK/CLI will collect telemetry by default, user can use `pf config set telemetry.enabled=false` to opt out.
- Add `raise_on_error` for stream run API, by default we raise for failed run.
- Flow as function: consume a flow like a function with parameters mapped to flow inputs.
- Enable specifying the default output path for run.
- Use `pf config set run.output_path=<output-path>` to specify, and the run output path will be `<output-path>/<run-name>`.
- Introduce macro `${flow_directory}` for `run.output_path` in config, which will be replaced with corresponding flow directory.
- The flow directory cannot be set as run output path, which means `pf config set run.output_path='${flow_directory}'` is invalid; but you can use child folder, e.g. `pf config set run.output_path='${flow_directory}/.runs'`.
- Support pfazure run create with remote flow.
- For remote workspace flow: `pfazure run create --flow azureml:<flow-name>`
- For remote registry flow: `pfazure run create --flow azureml://registries/<registry-name>/models/<flow-name>/versions/<flow-version>`
- Support set logging level via environment variable `PF_LOGGING_LEVEL`, valid values includes `CRITICAL`, `ERROR`, `WARNING`, `INFO`, `DEBUG`, default to `INFO`.
- Remove openai version restrictions
### Bugs Fixed
- [SDK/CLI] Fix the issue that node test with dict node input raises "Required input(s) missing".
- [SDK/CLI] Will use run name as display name when display name not specified (used flow folder name before).
- [SDK/CLI] Fix `pf flow build` creating an unexpected extra layer of the dist folder.
- [SDK/CLI] Fix prompt flow deployment when the connections value may be None.
### Improvements
- Force 'az login' if using azureml connection provider in cli command.
- Add env variable 'PF_NO_INTERACTIVE_LOGIN' to disable interactive login if using azureml connection provider in promptflow sdk.
- Improved CLI invoke time.
- Bump `pydash` upper bound to 8.0.0.
- Bump `SQLAlchemy` upper bound to 3.0.0.
- Bump `flask` upper bound to 4.0.0, `flask-restx` upper bound to 2.0.0.
- Bump `ruamel.yaml` upper bound to 1.0.0.
## 1.0.0 (2023.11.09)
### Features Added
- [Executor] Add `enable_kwargs` tag in tools.json for customer python tool.
- [SDK/CLI] Support `pfazure flow create`. Create a flow on Azure AI from local flow folder.
- [SDK/CLI] Changed column mapping `${run.inputs.xx}`'s behavior, it will refer to run's data columns instead of run's inputs columns.
### Bugs Fixed
- [SDK/CLI] Keep original format in run output.jsonl.
- [Executor] Fix the bug where an error is raised when an aggregation node references a bypassed node.
### Improvements
- [Executor] Set the outputs of the bypassed nodes as None
## 0.1.0b8 (2023.10.26)
### Features Added
- [Executor] Add average execution time and estimated execution time to batch run logs
- [SDK/CLI] Support `pfazure run archive/restore/update`.
- [SDK/CLI] Support custom strong type connection.
- [SDK/CLI] Enable telemetry and won't collect by default, use `pf config set cli.telemetry_enabled=true` to opt in.
- [SDK/CLI] Exposed function `from promptflow import load_run` to load run object from local YAML file.
- [Executor] Support `ToolProvider` for script tools.
### Bugs Fixed
- **pf config set**:
- Fix bug for workspace `connection.provider=azureml` doesn't work as expected.
- [SDK/CLI] Fix the bug that using sdk/cli to submit batch run did not display the log correctly.
- [SDK/CLI] Fix encoding issues when input is non-English with `pf flow test`.
- [Executor] Fix the bug that files containing "Private Use" unicode characters can't be read.
- [SDK/CLI] Fix string type data will be converted to integer/float.
- [SDK/CLI] Remove the max rows limitation of loading data.
- [SDK/CLI] Fix the bug --set not taking effect when creating run from file.
### Improvements
- [SDK/CLI] Experience improvements in `pf run visualize` page:
- Add column status.
- Support opening flow file by clicking run id.
## 0.1.0b7.post1 (2023.09.28)
### Bugs Fixed
- Fix extra dependency bug when importing `promptflow` without `azure-ai-ml` installed.
## 0.1.0b7 (2023.09.27)
### Features Added
- **pf flow validate**: support validate flow
- **pf config set**: support set user-level promptflow config.
- Support workspace connection provider, usage: `pf config set connection.provider=azureml://subscriptions/<subscription_id>/resourceGroups/<resource_group>/providers/Microsoft.MachineLearningServices/workspaces/<workspace_name>`
- Support override openai connection's model when submitting a flow. For example: `pf run create --flow ./ --data ./data.jsonl --connection llm.model=xxx --column-mapping url='${data.url}'`
### Bugs Fixed
- [Flow build] Fix flow build file name and environment variable name when connection name contains space.
- Reserve `.promptflow` folder when dump run snapshot.
- Read/write log file with encoding specified.
- Avoid inconsistent error message when executor exits abnormally.
- Align inputs & outputs row number in case partial completed run will break `pfazure run show-details`.
- Fix bug that failed to parse portal url for run data when the form is an asset id.
- Fix the issue of process hanging for a long time when running the batch run.
### Improvements
- [Executor][Internal] Improve error message with more details and actionable information.
- [SDK/CLI] `pf/pfazure run show-details`:
- Add `--max-results` option to control the number of results to display.
- Add `--all-results` option to display all results.
- Add validation for azure `PFClient` constructor in case wrong parameter is passed.
## 0.1.0b6 (2023.09.15)
### Features Added
- [promptflow][Feature] Store token metrics in run properties
### Bugs Fixed
- Refine error message body for flow_validator.py
- Refine error message body for run_tracker.py
- [Executor][Internal] Add some unit test to improve code coverage of log/metric
- [SDK/CLI] Update portal link to remove flight.
- [Executor][Internal] Improve inputs mapping's error message.
- [API] Resolve warnings/errors of sphinx build
## 0.1.0b5 (2023.09.08)
### Features Added
- **pf run visualize**: support lineage graph & display name in visualize page
### Bugs Fixed
- Add missing requirement `psutil` in `setup.py`
## 0.1.0b4 (2023.09.04)
### Features Added
- Support `pf flow build` commands
## 0.1.0b3 (2023.08.30)
- Minor bug fixes.
## 0.1.0b2 (2023.08.29)
- First preview version with major CLI & SDK features.
### Features Added
- **pf flow**: init/test/serve/export
- **pf run**: create/update/stream/list/show/show-details/show-metrics/visualize/archive/restore/export
- **pf connection**: create/update/show/list/delete
- Azure AI support:
- **pfazure run**: create/list/stream/show/show-details/show-metrics/visualize
## 0.1.0b1 (2023.07.20)
- Stub version in Pypi.
| promptflow/src/promptflow/CHANGELOG.md/0 | {
"file_path": "promptflow/src/promptflow/CHANGELOG.md",
"repo_id": "promptflow",
"token_count": 3539
} | 6 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import inspect
import json
import shutil
from abc import ABC, abstractmethod
from ast import literal_eval
from enum import Enum
from pathlib import Path
from jinja2 import Environment, Template, meta
from promptflow._sdk._constants import DEFAULT_ENCODING
from promptflow._sdk.operations._flow_operations import FlowOperations
from promptflow._utils.logger_utils import get_cli_sdk_logger
from promptflow.contracts.flow import Flow as ExecutableFlow
from promptflow.exceptions import UserErrorException
logger = get_cli_sdk_logger()
TEMPLATE_PATH = Path(__file__).parent.parent / "data" / "entry_flow"
CHAT_FLOW_TEMPLATE_PATH = Path(__file__).parent.parent / "data" / "chat_flow" / "template"
TOOL_TEMPLATE_PATH = Path(__file__).parent.parent / "data" / "package_tool"
EXTRA_FILES_MAPPING = {"requirements.txt": "requirements_txt", ".gitignore": "gitignore"}
SERVE_TEMPLATE_PATH = Path(__file__).resolve().parent.parent.parent / "_sdk" / "data" / "executable"
class BaseGenerator(ABC):
@property
@abstractmethod
def tpl_file(self):
pass
@property
@abstractmethod
def entry_template_keys(self):
pass
def generate(self) -> str:
"""Generate content based on given template and actual value of template keys."""
with open(self.tpl_file, encoding=DEFAULT_ENCODING) as f:
entry_template = f.read()
entry_template = Template(entry_template, trim_blocks=True, lstrip_blocks=True)
return entry_template.render(**{key: getattr(self, key) for key in self.entry_template_keys})
def generate_to_file(self, target):
"""Generate content to a file based on given template and actual value of template keys."""
target = Path(target).resolve()
action = "Overwriting" if target.exists() else "Creating"
print(f"{action} {target.resolve()}...")
with open(target, "w", encoding=DEFAULT_ENCODING) as f:
f.write(self.generate())
class ToolPyGenerator(BaseGenerator):
def __init__(self, entry, function, function_obj):
self.function_import = f"from {Path(entry).stem} import {function}"
self.entry_function = function
self.tool_function = f"{function}_tool"
# TODO: support default for tool args
self.tool_arg_list = inspect.signature(function_obj).parameters.values()
@property
def tpl_file(self):
return TEMPLATE_PATH / "tool.py.jinja2"
@property
def entry_template_keys(self):
return ["function_import", "entry_function", "tool_function", "tool_arg_list"]
class ValueType(str, Enum):
INT = "int"
DOUBLE = "double"
BOOL = "bool"
STRING = "string"
LIST = "list"
OBJECT = "object"
@staticmethod
def from_type(t: type):
if t == int:
return ValueType.INT
if t == float:
return ValueType.DOUBLE
if t == bool:
return ValueType.BOOL
if t == str:
return ValueType.STRING
if t == list:
return ValueType.LIST
return ValueType.OBJECT
class ToolMetaGenerator(BaseGenerator):
def __init__(self, tool_py, function, function_obj, prompt_params):
self.tool_file = tool_py
self.tool_function = f"{function}_tool"
# TODO: support default for tool meta args
self.tool_meta_args = self.get_tool_meta_args(function_obj)
self._prompt_params = prompt_params
@property
def prompt_params(self):
from promptflow._core.tool_meta_generator import generate_prompt_meta_dict
prompt_objs = {}
for key, file_name in self._prompt_params.items():
file_path = Path(file_name)
if not file_path.exists():
logger.warning(
f'Cannot find the prompt template "{file_name}", creating an empty prompt file in the flow...'
)
with open(file_path, "w") as f:
f.write("{# please enter your prompt content in this file. #}")
with open(file_name, "r") as f:
content = f.read()
name = Path(file_name).stem
prompt_objs[key] = generate_prompt_meta_dict(name, content, prompt_only=True, source=file_name)
return prompt_objs
def get_tool_meta_args(self, function_obj):
func_params = inspect.signature(function_obj).parameters
# TODO: Support enum/union in the future
return {k: ValueType.from_type(v.annotation).value for k, v in func_params.items()}
@property
def tpl_file(self):
return TEMPLATE_PATH / "flow.tools.json.jinja2"
@property
def entry_template_keys(self):
return ["prompt_params", "tool_file", "tool_meta_args", "tool_function"]
class FlowDAGGenerator(BaseGenerator):
def __init__(self, tool_py, function, function_obj, prompt_params):
self.tool_file = tool_py
self.main_node_name = function
self.prompt_params = prompt_params
self.setup_sh = None
self.python_requirements_txt = None
self._prompt_inputs = None
self._func_params = None
self._function_obj = function_obj
# Abstract prompt param from tool meta args
self.flow_inputs = self.get_flow_inputs(prompt_params)
def get_flow_inputs(self, prompt_params):
"""Generate the flow inputs"""
flow_inputs = {
k: ValueType.from_type(v.annotation).value for k, v in self.func_params.items() if k not in prompt_params
}
for prompt_inputs in self.prompt_inputs.values():
flow_inputs.update(prompt_inputs)
return flow_inputs
@property
def tpl_file(self):
return TEMPLATE_PATH / "flow.dag.yaml.jinja2"
@property
def func_params(self):
"""Generate function inputs without prompt templates."""
if self._func_params is None:
self._func_params = {
k: v for k, v in inspect.signature(self._function_obj).parameters.items() if k not in self.prompt_params
}
return self._func_params
@property
def prompt_inputs(self):
"""Generate prompt inputs."""
if self._prompt_inputs is None:
self._prompt_inputs = {}
for prompt_name, file_name in self.prompt_params.items():
try:
with open(file_name, "r") as f:
env = Environment()
ast = env.parse(f.read())
variables = meta.find_undeclared_variables(ast)
self._prompt_inputs[prompt_name] = {item: "string" for item in variables or []}
except Exception as e:
logger.warning(f"Get the prompt input from {file_name} failed, {e}.")
return self._prompt_inputs
@property
def entry_template_keys(self):
return [
"flow_inputs",
"main_node_name",
"prompt_params",
"tool_file",
"setup_sh",
"python_requirements_txt",
"prompt_inputs",
"func_params",
]
def generate_to_file(self, target):
# Get requirements.txt and setup.sh from target folder.
requirements_file = "requirements.txt"
if (Path(target).parent / requirements_file).exists():
self.python_requirements_txt = requirements_file
setup_file = "setup.sh"
if (Path(target).parent / setup_file).exists():
self.setup_sh = setup_file
super().generate_to_file(target=target)
class FlowMetaYamlGenerator(BaseGenerator):
def __init__(self, flow_name):
self.flow_name = flow_name
@property
def tpl_file(self):
return TEMPLATE_PATH / "flow.meta.yaml.jinja2"
@property
def entry_template_keys(self):
return ["flow_name"]
class StreamlitFileReplicator:
def __init__(self, flow_name, flow_dag_path):
self.flow_name = flow_name
self.flow_dag_path = Path(flow_dag_path)
self.executable = ExecutableFlow.from_yaml(
flow_file=Path(self.flow_dag_path.name), working_dir=self.flow_dag_path.parent
)
self.is_chat_flow, self.chat_history_input_name, error_msg = FlowOperations._is_chat_flow(self.executable)
@property
def flow_inputs(self):
if self.is_chat_flow:
results = {}
for flow_input, value in self.executable.inputs.items():
if not value.is_chat_history:
if value.type.value not in [ValueType.STRING.value, ValueType.LIST.value]:
raise UserErrorException(
f"Only support string or list type for chat flow input, but got {value.type.value}."
)
results.update({flow_input: (value.default, value.type.value)})
else:
results = {
flow_input: (value.default, value.type.value) for flow_input, value in self.executable.inputs.items()
}
return results
@property
def label(self):
return "Chat" if self.is_chat_flow else "Run"
@property
def py_file(self):
return SERVE_TEMPLATE_PATH / "main.py"
@property
def flow_path(self):
return self.flow_dag_path.as_posix()
@property
def chat_output_name(self):
try:
output_name = next(
filter(
lambda key: self.executable.outputs[key].is_chat_output,
self.executable.outputs.keys(),
)
)
except StopIteration:
output_name = None
return output_name
@property
def is_streaming(self):
return True if self.is_chat_flow else False
@property
def entry_template_keys(self):
return [
"flow_name",
"flow_path",
"is_chat_flow",
"chat_history_input_name",
"flow_inputs",
"label",
"chat_output_name",
"is_streaming",
]
def generate_to_file(self, target):
if Path(target).name == "main.py":
target = Path(target).resolve()
shutil.copy(self.py_file, target)
config_content = {key: getattr(self, key) for key in self.entry_template_keys}
with open(target.parent / "config.json", "w") as file:
json.dump(config_content, file, indent=4)
else:
shutil.copy(SERVE_TEMPLATE_PATH / Path(target).name, target)
class ChatFlowDAGGenerator(BaseGenerator):
def __init__(self, connection, deployment):
self.connection = connection
self.deployment = deployment
@property
def tpl_file(self):
return CHAT_FLOW_TEMPLATE_PATH / "flow.dag.yaml.jinja2"
@property
def entry_template_keys(self):
return ["connection", "deployment"]
class AzureOpenAIConnectionGenerator(BaseGenerator):
def __init__(self, connection):
self.connection = connection
@property
def tpl_file(self):
return CHAT_FLOW_TEMPLATE_PATH / "azure_openai.yaml.jinja2"
@property
def entry_template_keys(self):
return ["connection"]
class OpenAIConnectionGenerator(BaseGenerator):
def __init__(self, connection):
self.connection = connection
@property
def tpl_file(self):
return CHAT_FLOW_TEMPLATE_PATH / "openai.yaml.jinja2"
@property
def entry_template_keys(self):
return ["connection"]
def copy_extra_files(flow_path, extra_files, overwrite=False):
for file_name in extra_files:
extra_file_path = (
Path(__file__).parent.parent / "data" / "entry_flow" / EXTRA_FILES_MAPPING.get(file_name, file_name)
)
target_path = Path(flow_path) / file_name
if target_path.exists() and not overwrite:
continue
action = "Overwriting" if target_path.exists() else "Creating"
print(f"{action} {target_path.resolve()}...")
shutil.copy2(extra_file_path, target_path)
class ToolPackageGenerator(BaseGenerator):
def __init__(self, tool_name, icon=None, extra_info=None):
self.tool_name = tool_name
self._extra_info = extra_info
self.icon = icon
@property
def extra_info(self):
if self._extra_info:
extra_info = {}
for k, v in self._extra_info.items():
try:
extra_info[k] = literal_eval(v)
except Exception:
extra_info[k] = repr(v)
return extra_info
else:
return {}
@property
def tpl_file(self):
return TOOL_TEMPLATE_PATH / "tool.py.jinja2"
@property
def entry_template_keys(self):
return ["tool_name", "extra_info", "icon"]
class SetupGenerator(BaseGenerator):
def __init__(self, package_name, tool_name):
self.package_name = package_name
self.tool_name = tool_name
@property
def tpl_file(self):
return TOOL_TEMPLATE_PATH / "setup.py.jinja2"
@property
def entry_template_keys(self):
return ["package_name", "tool_name"]
class ToolPackageUtilsGenerator(BaseGenerator):
def __init__(self, package_name):
self.package_name = package_name
@property
def tpl_file(self):
return TOOL_TEMPLATE_PATH / "utils.py.jinja2"
@property
def entry_template_keys(self):
return ["package_name"]
class ToolReadmeGenerator(BaseGenerator):
def __init__(self, package_name, tool_name):
self.package_name = package_name
self.tool_name = tool_name
@property
def tpl_file(self):
return TOOL_TEMPLATE_PATH / "README.md.jinja2"
@property
def entry_template_keys(self):
return ["package_name", "tool_name"]
class InitGenerator(BaseGenerator):
@property
def tpl_file(self):
return TOOL_TEMPLATE_PATH / "init.py"
@property
def entry_template_keys(self):
pass
def generate(self) -> str:
with open(self.tpl_file) as f:
init_content = f.read()
return init_content
| promptflow/src/promptflow/promptflow/_cli/_pf/_init_entry_generators.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_cli/_pf/_init_entry_generators.py",
"repo_id": "promptflow",
"token_count": 6363
} | 7 |
system:
You are a helpful assistant.
{% for item in chat_history %}
user:
{{item.inputs.question}}
assistant:
{{item.outputs.answer}}
{% endfor %}
user:
{{question}}
| promptflow/src/promptflow/promptflow/_cli/data/chat_flow/flow_files/chat.jinja2/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_cli/data/chat_flow/flow_files/chat.jinja2",
"repo_id": "promptflow",
"token_count": 62
} | 8 |
import yaml
from pathlib import Path
from setuptools import find_packages, setup
from setuptools.command.build import build
PACKAGE_NAME = "{{ package_name }}"
class ToolMetaCacheBuild(build):
def run(self):
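        # Import promptflow lazily so it is only required when the build command runs.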
from promptflow import PFClient
pf_client = PFClient()
tools = pf_client.tools._list_tools_in_package(PACKAGE_NAME, raise_error=False)
        # Generate tool meta cache file.
meta_cache_file = Path(__file__).parent / PACKAGE_NAME / "yamls" / "tools_meta.yaml"
meta_cache_file.parent.mkdir(parents=True, exist_ok=True)
with open(meta_cache_file, "w", encoding="utf-8") as f:
yaml.safe_dump(tools, f)
super().run()
# Remove temporary cache file
meta_cache_file.unlink(missing_ok=True)
meta_cache_file.parent.rmdir()
setup(
name=PACKAGE_NAME,
version="0.0.1",
description="This is my tools package",
packages=find_packages(),
entry_points={
"package_tools": ["{{ package_name }} = {{ package_name }}.utils:list_package_tools"],
},
install_requires=[
"promptflow",
],
package_data={PACKAGE_NAME: ["yamls/*.yaml"]},
cmdclass={
'build': ToolMetaCacheBuild, # Generate tool meta cache file in package
},
include_package_data=True, # This line tells setuptools to include files from MANIFEST.in
)
| promptflow/src/promptflow/promptflow/_cli/data/package_tool/setup.py.jinja2/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_cli/data/package_tool/setup.py.jinja2",
"repo_id": "promptflow",
"token_count": 556
} | 9 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import sys
from contextvars import ContextVar
from datetime import datetime, timezone
from io import StringIO, TextIOBase
from typing import Dict
from promptflow._utils.logger_utils import flow_logger, logger, scrub_credentials
class NodeInfo:
def __init__(self, run_id: str, node_name: str, line_number: int):
self.run_id = run_id
self.node_name = node_name
self.line_number = line_number
def __str__(self) -> str:
return f"{self.node_name} in line {self.line_number} (index starts from 0)"
class NodeLogManager:
"""Replace sys.stdout and sys.stderr with NodeLogWriter.
This class intercepts and saves logs to stdout/stderr when executing a node. For example:
with NodeLogManager() as log_manager:
print('test stdout')
print('test stderr', file=sys.stderr)
log_manager.get_logs() will return: {'stdout': 'test stdout\n', 'stderr': 'test stderr\n'}
"""
def __init__(self, record_datetime=True):
self.stdout_logger = NodeLogWriter(sys.stdout, record_datetime)
self.stderr_logger = NodeLogWriter(sys.stderr, record_datetime, is_stderr=True)
self.log_handler = None
def __enter__(self):
"""Replace sys.stdout and sys.stderr with NodeLogWriter."""
self._prev_stdout = sys.stdout
self._prev_stderr = sys.stderr
sys.stdout = self.stdout_logger
sys.stderr = self.stderr_logger
return self
def __exit__(self, *args):
"""Restore sys.stdout and sys.stderr."""
sys.stdout = self._prev_stdout
sys.stderr = self._prev_stderr
def set_node_context(self, run_id: str, node_name: str, line_number: int):
"""Set node context."""
self.stdout_logger.set_node_info(run_id, node_name, line_number)
self.stderr_logger.set_node_info(run_id, node_name, line_number)
def clear_node_context(self, run_id):
"""Clear node context."""
self.stdout_logger.clear_node_info(run_id)
self.stderr_logger.clear_node_info(run_id)
def get_logs(self, run_id) -> Dict[str, str]:
return {
"stdout": self.stdout_logger.get_log(run_id),
"stderr": self.stderr_logger.get_log(run_id),
}
class NodeLogWriter(TextIOBase):
"""Record node run logs."""
DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S%z"
def __init__(self, prev_stdout, record_datetime=True, is_stderr=False):
self.run_id_to_stdout = dict()
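        # ContextVar keeps the current NodeInfo isolated per thread/async context,
        # so concurrently executing nodes write into their own buffers.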
self._context = ContextVar("run_log_info", default=None)
self._prev_out = prev_stdout
self._record_datetime = record_datetime
self._is_stderr = is_stderr
def set_node_info(self, run_id: str, node_name: str, line_number: int = None):
"""Set node info to a context variable.
After set node info, write method will write to stringio associated with this node.
"""
run_log_info = NodeInfo(run_id, node_name, line_number)
self._context.set(run_log_info)
self.run_id_to_stdout.update({run_id: StringIO()})
def clear_node_info(self, run_id: str):
"""Clear context variable associated with run id."""
log_info: NodeInfo = self._context.get()
if log_info and log_info.run_id == run_id:
self._context.set(None)
if run_id in self.run_id_to_stdout:
self.run_id_to_stdout.pop(run_id)
def get_log(self, run_id: str) -> str:
"""Get log associated with run id."""
string_io: StringIO = self.run_id_to_stdout.get(run_id)
if string_io is None:
return None
return string_io.getvalue()
def write(self, s: str):
"""Override TextIO's write method and writes input string into a stringio
The written string is compliant without any credentials.
The string is also recorded to flow/bulk logger.
If node info is not set, write to previous stdout.
"""
log_info: NodeInfo = self._context.get()
s = scrub_credentials(s) # Remove credential from string.
if log_info is None:
self._prev_out.write(s)
else:
self._write_to_flow_log(log_info, s)
stdout: StringIO = self.run_id_to_stdout.get(log_info.run_id)
if self._record_datetime and s != "\n": # For line breaker, do not add datetime prefix.
s = f"[{datetime.now(timezone.utc).strftime(self.DATETIME_FORMAT)}] {s}"
stdout.write(s)
def flush(self):
"""Override TextIO's flush method."""
node_info: NodeInfo = self._context.get()
if node_info is None:
self._prev_out.flush()
else:
string_io = self.run_id_to_stdout.get(node_info.run_id)
if string_io is not None:
string_io.flush()
def _write_to_flow_log(self, log_info: NodeInfo, s: str):
"""Save stdout log to flow_logger and stderr log to logger."""
# If user uses "print('log message.')" to log, then
# "write" method will be called twice and the second time input is only '\n'.
# For this case, should not log '\n' in flow_logger.
if s != "\n":
if self._is_stderr:
flow_log = f"[{str(log_info)}] stderr> " + s.rstrip("\n")
# Log stderr in all scenarios so we can diagnose problems.
logger.warning(flow_log)
else:
flow_log = f"[{str(log_info)}] stdout> " + s.rstrip("\n")
# Log stdout only in flow mode.
flow_logger.info(flow_log)
| promptflow/src/promptflow/promptflow/_core/log_manager.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_core/log_manager.py",
"repo_id": "promptflow",
"token_count": 2523
} | 10 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from os import PathLike
from pathlib import Path
from typing import IO, AnyStr, Optional, Union
from dotenv import dotenv_values
from .._utils.logger_utils import get_cli_sdk_logger
from .._utils.yaml_utils import load_yaml
from ._errors import MultipleExperimentTemplateError, NoExperimentTemplateError
from .entities import Run
from .entities._connection import CustomConnection, _Connection
from .entities._experiment import ExperimentTemplate
from .entities._flow import Flow
logger = get_cli_sdk_logger()
def load_common(
cls,
source: Union[str, PathLike, IO[AnyStr]],
relative_origin: str = None,
params_override: Optional[list] = None,
**kwargs,
):
"""Private function to load a yaml file to an entity object.
:param cls: The entity class type.
:type cls: type[Resource]
:param source: A source of yaml.
:type source: Union[str, PathLike, IO[AnyStr]]
:param relative_origin: The origin of to be used when deducing
the relative locations of files referenced in the parsed yaml.
Must be provided, and is assumed to be assigned by other internal
functions that call this.
:type relative_origin: str
    :param params_override: Fields to overwrite on top of the yaml file, defaults to None
:type params_override: list, optional
"""
if relative_origin is None:
if isinstance(source, (str, PathLike)):
relative_origin = source
else:
try:
relative_origin = source.name
except AttributeError: # input is a stream or something
relative_origin = "./"
params_override = params_override or []
yaml_dict = load_yaml(source)
logger.debug(f"Resolve cls and type with {yaml_dict}, params_override {params_override}.")
# pylint: disable=protected-access
cls, type_str = cls._resolve_cls_and_type(data=yaml_dict, params_override=params_override)
try:
return cls._load(
data=yaml_dict,
yaml_path=relative_origin,
params_override=params_override,
**kwargs,
)
except Exception as e:
raise Exception(f"Load entity error: {e}") from e
def load_flow(
source: Union[str, PathLike, IO[AnyStr]],
**kwargs,
) -> Flow:
"""Load flow from YAML file.
:param source: The local yaml source of a flow. Must be a path to a local file.
If the source is a path, it will be open and read.
An exception is raised if the file does not exist.
:type source: Union[PathLike, str]
:return: A Flow object
:rtype: Flow
"""
return Flow.load(source, **kwargs)
def load_run(
source: Union[str, PathLike, IO[AnyStr]],
params_override: Optional[list] = None,
**kwargs,
) -> Run:
"""Load run from YAML file.
:param source: The local yaml source of a run. Must be a path to a local file.
If the source is a path, it will be open and read.
An exception is raised if the file does not exist.
:type source: Union[PathLike, str]
:param params_override: Fields to overwrite on top of the yaml file.
Format is [{"field1": "value1"}, {"field2": "value2"}]
:type params_override: List[Dict]
:return: A Run object
:rtype: Run
"""
data = load_yaml(source=source)
return Run._load(data=data, yaml_path=source, params_override=params_override, **kwargs)
def load_connection(
source: Union[str, PathLike, IO[AnyStr]],
**kwargs,
):
"""Load connection from YAML file or .env file.
:param source: The local yaml source of a connection or .env file. Must be a path to a local file.
If the source is a path, it will be open and read.
An exception is raised if the file does not exist.
:type source: Union[PathLike, str]
:return: A Connection object
:rtype: Connection
"""
if Path(source).name.endswith(".env"):
return _load_env_to_connection(source, **kwargs)
return load_common(_Connection, source, **kwargs)
def _load_env_to_connection(
source,
params_override: Optional[list] = None,
**kwargs,
):
source = Path(source)
name = next((_dct["name"] for _dct in params_override if "name" in _dct), None)
if not name:
raise Exception("Please specify --name when creating connection from .env.")
if not source.exists():
raise FileNotFoundError(f"File {source.absolute().as_posix()!r} not found.")
try:
data = dict(dotenv_values(source))
if not data:
# Handle some special case dotenv returns empty with no exception raised.
raise ValueError(
f"Load nothing from dotenv file {source.absolute().as_posix()!r}, "
"please make sure the file is not empty and readable."
)
return CustomConnection(name=name, secrets=data)
except Exception as e:
raise Exception(f"Load entity error: {e}") from e
def _load_experiment_template(
source: Union[str, PathLike, IO[AnyStr]],
**kwargs,
):
"""Load experiment template from YAML file.
:param source: The local yaml source of an experiment template. Must be a path to a local file.
If the source is a path, it will be open and read.
An exception is raised if the file does not exist.
:type source: Union[PathLike, str]
:return: An ExperimentTemplate object
:rtype: ExperimentTemplate
"""
source_path = Path(source)
if source_path.is_dir():
target_yaml_list = []
for item in list(source_path.iterdir()):
if item.name.endswith(".exp.yaml"):
target_yaml_list.append(item)
if len(target_yaml_list) > 1:
raise MultipleExperimentTemplateError(
f"Multiple experiment template files found in {source_path.resolve().absolute().as_posix()}, "
f"please specify one."
)
if not target_yaml_list:
raise NoExperimentTemplateError(
f"Experiment template file not found in {source_path.resolve().absolute().as_posix()}."
)
source_path = target_yaml_list[0]
if not source_path.exists():
raise NoExperimentTemplateError(
f"Experiment template file {source_path.resolve().absolute().as_posix()} not found."
)
return load_common(ExperimentTemplate, source=source_path)
| promptflow/src/promptflow/promptflow/_sdk/_load_functions.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_sdk/_load_functions.py",
"repo_id": "promptflow",
"token_count": 2496
} | 11 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from promptflow.exceptions import ErrorTarget, UserErrorException
class BadRequest(UserErrorException):
pass
class JsonPayloadRequiredForMultipleInputFields(BadRequest):
pass
class MissingRequiredFlowInput(BadRequest):
pass
class FlowConnectionError(UserErrorException):
pass
class UnsupportedConnectionProvider(FlowConnectionError):
def __init__(self, provider):
super().__init__(
message_format="Unsupported connection provider {provider}, " "supported are 'local' and typing.Callable.",
provider=provider,
target=ErrorTarget.FLOW_INVOKER,
)
class MissingConnectionProvider(FlowConnectionError):
pass
class InvalidConnectionData(FlowConnectionError):
def __init__(self, connection_name):
super().__init__(
message_format="Invalid connection data detected while overriding connection {connection_name}.",
connection_name=connection_name,
target=ErrorTarget.FLOW_INVOKER,
)
class UnexpectedConnectionProviderReturn(FlowConnectionError):
pass
class MultipleStreamOutputFieldsNotSupported(UserErrorException):
def __init__(self):
super().__init__(
"Multiple stream output fields not supported.",
target=ErrorTarget.SERVING_APP,
)
class NotAcceptable(UserErrorException):
def __init__(self, media_type, supported_media_types):
super().__init__(
message_format="Media type {media_type} in Accept header is not acceptable. "
"Supported media type(s) - {supported_media_types}",
media_type=media_type,
supported_media_types=supported_media_types,
target=ErrorTarget.SERVING_APP,
)
| promptflow/src/promptflow/promptflow/_sdk/_serving/_errors.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_sdk/_serving/_errors.py",
"repo_id": "promptflow",
"token_count": 664
} | 12 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import json
import time
from types import GeneratorType
from flask import Response, jsonify
from werkzeug.datastructures import MIMEAccept
from promptflow._sdk._serving._errors import MultipleStreamOutputFieldsNotSupported, NotAcceptable
class ResponseCreator:
"""Generates http response from flow run result."""
def __init__(
self,
flow_run_result,
accept_mimetypes,
stream_start_callback_func=None,
stream_end_callback_func=None,
stream_event_callback_func=None,
):
# Fields that are with GeneratorType are streaming outputs.
stream_fields = [k for k, v in flow_run_result.items() if isinstance(v, GeneratorType)]
if len(stream_fields) > 1:
raise MultipleStreamOutputFieldsNotSupported()
self.stream_field_name = stream_fields[0] if stream_fields else None
self.stream_iterator = flow_run_result.pop(self.stream_field_name, None)
self.non_stream_fields = flow_run_result
# According to RFC2616, if "Accept" header is not specified,
# then it is assumed that the client accepts all media types.
# Set */* as the default value here.
# https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html
if not accept_mimetypes:
accept_mimetypes = MIMEAccept([("*/*", 1)])
self.accept_mimetypes = accept_mimetypes
self._on_stream_start = stream_start_callback_func
self._on_stream_end = stream_end_callback_func
self._on_stream_event = stream_event_callback_func
@property
def has_stream_field(self):
return self.stream_field_name is not None
@property
def text_stream_specified_explicitly(self):
"""Returns True only when text/event-stream is specified explicitly.
For other cases like */* or text/*, it will return False.
"""
return "text/event-stream" in self.accept_mimetypes.values()
@property
def accept_json(self):
"""Returns True if the Accept header includes application/json.
It also returns True when specified with */* or application/*.
"""
return self.accept_mimetypes.accept_json
def create_text_stream_response(self):
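        # Server-Sent Events framing: each event is serialized as "data: <json>" followed by a blank line.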
def format_event(data):
return f"data: {json.dumps(data)}\n\n"
def generate():
start_time = time.time()
if self._on_stream_start:
self._on_stream_start()
# If there are non streaming fields, yield them firstly.
if self.non_stream_fields:
yield format_event(self.non_stream_fields)
# If there is stream field, read and yield data until the end.
if self.stream_iterator is not None:
for chunk in self.stream_iterator:
if self._on_stream_event:
self._on_stream_event(chunk)
yield format_event({self.stream_field_name: chunk})
if self._on_stream_end:
duration = (time.time() - start_time) * 1000
self._on_stream_end(duration)
return Response(generate(), mimetype="text/event-stream")
def create_json_response(self):
# If there is stream field, iterate over it and get the merged result.
if self.stream_iterator is not None:
merged_text = "".join(self.stream_iterator)
self.non_stream_fields[self.stream_field_name] = merged_text
return jsonify(self.non_stream_fields)
def create_response(self):
if self.has_stream_field:
if self.text_stream_specified_explicitly:
return self.create_text_stream_response()
elif self.accept_json:
return self.create_json_response()
else:
raise NotAcceptable(
media_type=self.accept_mimetypes, supported_media_types="text/event-stream, application/json"
)
else:
if self.accept_json:
return self.create_json_response()
else:
raise NotAcceptable(media_type=self.accept_mimetypes, supported_media_types="application/json")
| promptflow/src/promptflow/promptflow/_sdk/_serving/response_creator.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_sdk/_serving/response_creator.py",
"repo_id": "promptflow",
"token_count": 1814
} | 13 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from ._asset_utils import IgnoreFile, get_ignore_file, get_upload_files_from_folder
__all__ = ["get_ignore_file", "IgnoreFile", "get_upload_files_from_folder"]
| promptflow/src/promptflow/promptflow/_sdk/_vendor/__init__.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_sdk/_vendor/__init__.py",
"repo_id": "promptflow",
"token_count": 79
} | 14 |
import os
import sys
from promptflow._cli._pf._connection import create_connection
from streamlit.web import cli as st_cli
from streamlit.runtime import exists
from main import start
def is_yaml_file(file_path):
# Get the file extension
_, file_extension = os.path.splitext(file_path)
# Check if the file extension is ".yaml" or ".yml"
return file_extension.lower() in ('.yaml', '.yml')
def create_connections(directory_path) -> None:
for root, dirs, files in os.walk(directory_path):
for file in files:
file_path = os.path.join(root, file)
if is_yaml_file(file_path):
create_connection(file_path)
if __name__ == "__main__":
create_connections(os.path.join(os.path.dirname(__file__), "connections"))
if exists():
start()
else:
main_script = os.path.join(os.path.dirname(__file__), "main.py")
sys.argv = ["streamlit", "run", main_script, "--global.developmentMode=false", "--client.toolbarMode=viewer", "--browser.gatherUsageStats=false"]
st_cli.main(prog_name="streamlit")
| promptflow/src/promptflow/promptflow/_sdk/data/executable/app.py.jinja2/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_sdk/data/executable/app.py.jinja2",
"repo_id": "promptflow",
"token_count": 435
} | 15 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from .core import MutableValidationResult, ValidationResult, ValidationResultBuilder
from .schema import SchemaValidatableMixin
__all__ = [
"SchemaValidatableMixin",
"MutableValidationResult",
"ValidationResult",
"ValidationResultBuilder",
]
| promptflow/src/promptflow/promptflow/_sdk/entities/_validation/__init__.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_sdk/entities/_validation/__init__.py",
"repo_id": "promptflow",
"token_count": 106
} | 16 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import copy
from marshmallow import ValidationError, fields, pre_dump, validates
from promptflow._sdk._constants import (
SCHEMA_KEYS_CONTEXT_CONFIG_KEY,
SCHEMA_KEYS_CONTEXT_SECRET_KEY,
ConnectionType,
CustomStrongTypeConnectionConfigs,
)
from promptflow._sdk.schemas._base import YamlFileSchema
from promptflow._sdk.schemas._fields import StringTransformedEnum
from promptflow._utils.utils import camel_to_snake
def _casting_type(typ):
type_dict = {
ConnectionType.AZURE_OPEN_AI: "azure_open_ai",
ConnectionType.OPEN_AI: "open_ai",
}
if typ in type_dict:
return type_dict.get(typ)
return camel_to_snake(typ)
class ConnectionSchema(YamlFileSchema):
name = fields.Str(attribute="name")
module = fields.Str(dump_default="promptflow.connections")
created_date = fields.Str(dump_only=True)
last_modified_date = fields.Str(dump_only=True)
expiry_time = fields.Str(dump_only=True)
@pre_dump
def _pre_dump(self, data, **kwargs):
from promptflow._sdk.entities._connection import _Connection
if not isinstance(data, _Connection):
return data
        # Update the type field of the copied connection object to match the schema
copied = copy.deepcopy(data)
copied.type = camel_to_snake(copied.type)
return copied
class AzureOpenAIConnectionSchema(ConnectionSchema):
type = StringTransformedEnum(allowed_values="azure_open_ai", required=True)
api_key = fields.Str(required=True)
api_base = fields.Str(required=True)
api_type = fields.Str(dump_default="azure")
api_version = fields.Str(dump_default="2023-07-01-preview")
class OpenAIConnectionSchema(ConnectionSchema):
type = StringTransformedEnum(allowed_values="open_ai", required=True)
api_key = fields.Str(required=True)
organization = fields.Str()
base_url = fields.Str()
class EmbeddingStoreConnectionSchema(ConnectionSchema):
module = fields.Str(dump_default="promptflow_vectordb.connections")
api_key = fields.Str(required=True)
api_base = fields.Str(required=True)
class QdrantConnectionSchema(EmbeddingStoreConnectionSchema):
type = StringTransformedEnum(allowed_values=camel_to_snake(ConnectionType.QDRANT), required=True)
class WeaviateConnectionSchema(EmbeddingStoreConnectionSchema):
type = StringTransformedEnum(allowed_values=camel_to_snake(ConnectionType.WEAVIATE), required=True)
class CognitiveSearchConnectionSchema(ConnectionSchema):
type = StringTransformedEnum(
allowed_values=camel_to_snake(ConnectionType.COGNITIVE_SEARCH),
required=True,
)
api_key = fields.Str(required=True)
api_base = fields.Str(required=True)
api_version = fields.Str(dump_default="2023-07-01-Preview")
class SerpConnectionSchema(ConnectionSchema):
type = StringTransformedEnum(allowed_values=camel_to_snake(ConnectionType.SERP), required=True)
api_key = fields.Str(required=True)
class AzureContentSafetyConnectionSchema(ConnectionSchema):
type = StringTransformedEnum(
allowed_values=camel_to_snake(ConnectionType.AZURE_CONTENT_SAFETY),
required=True,
)
api_key = fields.Str(required=True)
endpoint = fields.Str(required=True)
api_version = fields.Str(dump_default="2023-10-01")
api_type = fields.Str(dump_default="Content Safety")
class FormRecognizerConnectionSchema(ConnectionSchema):
type = StringTransformedEnum(
allowed_values=camel_to_snake(ConnectionType.FORM_RECOGNIZER),
required=True,
)
api_key = fields.Str(required=True)
endpoint = fields.Str(required=True)
api_version = fields.Str(dump_default="2023-07-31")
api_type = fields.Str(dump_default="Form Recognizer")
class CustomConnectionSchema(ConnectionSchema):
type = StringTransformedEnum(allowed_values=camel_to_snake(ConnectionType.CUSTOM), required=True)
configs = fields.Dict(keys=fields.Str(), values=fields.Str())
# Secrets is a must-have field for CustomConnection
secrets = fields.Dict(keys=fields.Str(), values=fields.Str(), required=True)
class CustomStrongTypeConnectionSchema(CustomConnectionSchema):
name = fields.Str(attribute="name")
module = fields.Str(required=True)
custom_type = fields.Str(required=True)
package = fields.Str(required=True)
package_version = fields.Str(required=True)
# TODO: validate configs and secrets
@validates("configs")
def validate_configs(self, value):
schema_config_keys = self.context.get(SCHEMA_KEYS_CONTEXT_CONFIG_KEY, None)
if schema_config_keys:
for key in value:
if CustomStrongTypeConnectionConfigs.is_custom_key(key) and key not in schema_config_keys:
raise ValidationError(f"Invalid config key {key}, please check the schema.")
@validates("secrets")
def validate_secrets(self, value):
schema_secret_keys = self.context.get(SCHEMA_KEYS_CONTEXT_SECRET_KEY, None)
if schema_secret_keys:
for key in value:
if key not in schema_secret_keys:
raise ValidationError(f"Invalid secret key {key}, please check the schema.")
| promptflow/src/promptflow/promptflow/_sdk/schemas/_connection.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_sdk/schemas/_connection.py",
"repo_id": "promptflow",
"token_count": 1953
} | 17 |
from dataclasses import dataclass
from enum import Enum
from typing import Optional
class FeatureState(Enum):
"""The enum of feature state.
READY: The feature is ready to use.
    E2ETEST: The feature is not ready to be shipped to customers and is in e2e testing.
"""
READY = "Ready"
E2ETEST = "E2ETest"
@dataclass
class Feature:
"""The dataclass of feature."""
name: str
description: str
state: FeatureState
component: Optional[str] = "executor"
def get_feature_list():
feature_list = [
Feature(
name="ActivateConfig",
description="Bypass node execution when the node does not meet activate condition.",
state=FeatureState.READY,
),
Feature(
name="Image",
description="Support image input and output.",
state=FeatureState.READY,
),
Feature(
name="EnvironmentVariablesInYaml",
description="Support environment variables in flow.dag.yaml.",
state=FeatureState.READY,
),
Feature(
name="BatchTimeout",
description="Support batch timeout.",
state=FeatureState.READY,
),
Feature(
name="BatchWorkerCount",
description="Supports users explicitly specifying the worker count for batch run.",
state=FeatureState.READY,
),
]
return feature_list
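# Example usage (a minimal sketch): serialize the feature list to plain dicts,
# e.g. for a feature-discovery endpoint.
#   from dataclasses import asdict
#   features = [{**asdict(f), "state": f.state.value} for f in get_feature_list()]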
| promptflow/src/promptflow/promptflow/_utils/feature_utils.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_utils/feature_utils.py",
"repo_id": "promptflow",
"token_count": 607
} | 18 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
from ._pf_client import PFClient
__all__ = ["PFClient"]
| promptflow/src/promptflow/promptflow/azure/__init__.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/azure/__init__.py",
"repo_id": "promptflow",
"token_count": 75
} | 19 |
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.9.2, generator: @autorest/[email protected])
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.core.pipeline.transport import HttpRequest
def _convert_request(request, files=None):
data = request.content if not files else None
request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data)
if files:
request.set_formdata_body(files)
return request
def _format_url_section(template, **kwargs):
components = template.split("/")
while components:
try:
return template.format(**kwargs)
except KeyError as key:
formatted_components = template.split("/")
components = [
c for c in formatted_components if "{}".format(key.args[0]) not in c
]
template = "/".join(components)
| promptflow/src/promptflow/promptflow/azure/_restclient/flow/_vendor.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/azure/_restclient/flow/_vendor.py",
"repo_id": "promptflow",
"token_count": 365
} | 20 |
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.9.2, generator: @autorest/[email protected])
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._tools_operations import build_get_dynamic_list_request, build_get_package_tools_request, build_get_samples_request, build_get_tool_meta_request, build_get_tool_meta_v2_request, build_get_tool_setting_request, build_retrieve_tool_func_result_request
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ToolsOperations:
"""ToolsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~flow.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace_async
async def get_tool_setting(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
**kwargs: Any
) -> "_models.ToolSetting":
"""get_tool_setting.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ToolSetting, or the result of cls(response)
:rtype: ~flow.models.ToolSetting
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ToolSetting"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_tool_setting_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
template_url=self.get_tool_setting.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('ToolSetting', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_tool_setting.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/setting'} # type: ignore
@distributed_trace_async
async def get_samples(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
**kwargs: Any
) -> Dict[str, "_models.Tool"]:
"""get_samples.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: dict mapping str to Tool, or the result of cls(response)
:rtype: dict[str, ~flow.models.Tool]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[Dict[str, "_models.Tool"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_samples_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
template_url=self.get_samples.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('{Tool}', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_samples.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/samples'} # type: ignore
@distributed_trace_async
async def get_tool_meta(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
tool_name: str,
tool_type: str,
endpoint_name: Optional[str] = None,
flow_runtime_name: Optional[str] = None,
flow_id: Optional[str] = None,
data: Optional[str] = None,
**kwargs: Any
) -> str:
"""get_tool_meta.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param tool_name:
:type tool_name: str
:param tool_type:
:type tool_type: str
:param endpoint_name:
:type endpoint_name: str
:param flow_runtime_name:
:type flow_runtime_name: str
:param flow_id:
:type flow_id: str
:param data:
:type data: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: str, or the result of cls(response)
:rtype: str
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[str]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "text/plain") # type: Optional[str]
_content = data
request = build_get_tool_meta_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
content_type=content_type,
tool_name=tool_name,
tool_type=tool_type,
content=_content,
endpoint_name=endpoint_name,
flow_runtime_name=flow_runtime_name,
flow_id=flow_id,
template_url=self.get_tool_meta.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_tool_meta.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/meta'} # type: ignore
@distributed_trace_async
async def get_tool_meta_v2(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
flow_runtime_name: Optional[str] = None,
flow_id: Optional[str] = None,
body: Optional["_models.GenerateToolMetaRequest"] = None,
**kwargs: Any
) -> "_models.ToolMetaDto":
"""get_tool_meta_v2.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param flow_runtime_name:
:type flow_runtime_name: str
:param flow_id:
:type flow_id: str
:param body:
:type body: ~flow.models.GenerateToolMetaRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ToolMetaDto, or the result of cls(response)
:rtype: ~flow.models.ToolMetaDto
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ToolMetaDto"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
if body is not None:
_json = self._serialize.body(body, 'GenerateToolMetaRequest')
else:
_json = None
request = build_get_tool_meta_v2_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
content_type=content_type,
json=_json,
flow_runtime_name=flow_runtime_name,
flow_id=flow_id,
template_url=self.get_tool_meta_v2.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('ToolMetaDto', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_tool_meta_v2.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/meta-v2'} # type: ignore
@distributed_trace_async
async def get_package_tools(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
flow_runtime_name: Optional[str] = None,
flow_id: Optional[str] = None,
**kwargs: Any
) -> Dict[str, "_models.Tool"]:
"""get_package_tools.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param flow_runtime_name:
:type flow_runtime_name: str
:param flow_id:
:type flow_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: dict mapping str to Tool, or the result of cls(response)
:rtype: dict[str, ~flow.models.Tool]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[Dict[str, "_models.Tool"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_package_tools_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
flow_runtime_name=flow_runtime_name,
flow_id=flow_id,
template_url=self.get_package_tools.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('{Tool}', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_package_tools.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/packageTools'} # type: ignore
@distributed_trace_async
async def get_dynamic_list(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
flow_runtime_name: Optional[str] = None,
flow_id: Optional[str] = None,
body: Optional["_models.GetDynamicListRequest"] = None,
**kwargs: Any
) -> List[Any]:
"""get_dynamic_list.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param flow_runtime_name:
:type flow_runtime_name: str
:param flow_id:
:type flow_id: str
:param body:
:type body: ~flow.models.GetDynamicListRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of any, or the result of cls(response)
:rtype: list[any]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List[Any]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
if body is not None:
_json = self._serialize.body(body, 'GetDynamicListRequest')
else:
_json = None
request = build_get_dynamic_list_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
content_type=content_type,
json=_json,
flow_runtime_name=flow_runtime_name,
flow_id=flow_id,
template_url=self.get_dynamic_list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('[object]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_dynamic_list.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/dynamicList'} # type: ignore
@distributed_trace_async
async def retrieve_tool_func_result(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
flow_runtime_name: Optional[str] = None,
flow_id: Optional[str] = None,
body: Optional["_models.RetrieveToolFuncResultRequest"] = None,
**kwargs: Any
) -> "_models.ToolFuncResponse":
"""retrieve_tool_func_result.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param flow_runtime_name:
:type flow_runtime_name: str
:param flow_id:
:type flow_id: str
:param body:
:type body: ~flow.models.RetrieveToolFuncResultRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ToolFuncResponse, or the result of cls(response)
:rtype: ~flow.models.ToolFuncResponse
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ToolFuncResponse"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
if body is not None:
_json = self._serialize.body(body, 'RetrieveToolFuncResultRequest')
else:
_json = None
request = build_retrieve_tool_func_result_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
content_type=content_type,
json=_json,
flow_runtime_name=flow_runtime_name,
flow_id=flow_id,
template_url=self.retrieve_tool_func_result.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('ToolFuncResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
retrieve_tool_func_result.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Tools/RetrieveToolFuncResult'} # type: ignore
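# Example usage (a minimal sketch; the attribute name `tools` on the generated
# service client is an assumption for illustration):
#   package_tools = await client.tools.get_package_tools(
#       subscription_id="<sub-id>",
#       resource_group_name="<rg>",
#       workspace_name="<workspace>",
#   )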
| promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio/operations/_tools_operations.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio/operations/_tools_operations.py",
"repo_id": "promptflow",
"token_count": 8893
} | 21 |
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.9.2, generator: @autorest/[email protected])
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
# fmt: off
def build_get_index_entity_by_id_request(
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
content_type = kwargs.pop('content_type', None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/flow/v1.0/flows/getIndexEntities')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="POST",
url=url,
headers=header_parameters,
**kwargs
)
def build_get_updated_entity_ids_for_workspace_request(
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
content_type = kwargs.pop('content_type', None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/flow/v1.0/flows/rebuildIndex')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="POST",
url=url,
headers=header_parameters,
**kwargs
)
# fmt: on
class FlowsProviderOperations(object):
"""FlowsProviderOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~flow.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def get_index_entity_by_id(
self,
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
body=None, # type: Optional["_models.UnversionedEntityRequestDto"]
**kwargs # type: Any
):
# type: (...) -> "_models.UnversionedEntityResponseDto"
"""get_index_entity_by_id.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param body:
:type body: ~flow.models.UnversionedEntityRequestDto
:keyword callable cls: A custom type or function that will be passed the direct response
:return: UnversionedEntityResponseDto, or the result of cls(response)
:rtype: ~flow.models.UnversionedEntityResponseDto
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.UnversionedEntityResponseDto"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
if body is not None:
_json = self._serialize.body(body, 'UnversionedEntityRequestDto')
else:
_json = None
request = build_get_index_entity_by_id_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
content_type=content_type,
json=_json,
template_url=self.get_index_entity_by_id.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('UnversionedEntityResponseDto', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_index_entity_by_id.metadata = {'url': '/flow/v1.0/flows/getIndexEntities'} # type: ignore
@distributed_trace
def get_updated_entity_ids_for_workspace(
self,
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
body=None, # type: Optional["_models.UnversionedRebuildIndexDto"]
**kwargs # type: Any
):
# type: (...) -> "_models.UnversionedRebuildResponseDto"
"""get_updated_entity_ids_for_workspace.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param body:
:type body: ~flow.models.UnversionedRebuildIndexDto
:keyword callable cls: A custom type or function that will be passed the direct response
:return: UnversionedRebuildResponseDto, or the result of cls(response)
:rtype: ~flow.models.UnversionedRebuildResponseDto
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.UnversionedRebuildResponseDto"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
if body is not None:
_json = self._serialize.body(body, 'UnversionedRebuildIndexDto')
else:
_json = None
request = build_get_updated_entity_ids_for_workspace_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
content_type=content_type,
json=_json,
template_url=self.get_updated_entity_ids_for_workspace.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('UnversionedRebuildResponseDto', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_updated_entity_ids_for_workspace.metadata = {'url': '/flow/v1.0/flows/rebuildIndex'} # type: ignore
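# Example usage (a minimal sketch; `ops` is an already-constructed
# FlowsProviderOperations instance attached to a service client):
#   dto = ops.get_updated_entity_ids_for_workspace(
#       subscription_id="<sub-id>",
#       resource_group_name="<rg>",
#       workspace_name="<workspace>",
#   )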
| promptflow/src/promptflow/promptflow/azure/_restclient/flow/operations/_flows_provider_operations.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/azure/_restclient/flow/operations/_flows_provider_operations.py",
"repo_id": "promptflow",
"token_count": 3934
} | 22 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from typing import Dict
from azure.ai.ml._scope_dependent_operations import (
OperationConfig,
OperationsContainer,
OperationScope,
_ScopeDependentOperations,
)
from promptflow._sdk._utils import safe_parse_object_list
from promptflow._sdk.entities._connection import _Connection
from promptflow._utils.logger_utils import get_cli_sdk_logger
from promptflow.azure._entities._workspace_connection_spec import WorkspaceConnectionSpec
from promptflow.azure._restclient.flow_service_caller import FlowServiceCaller
logger = get_cli_sdk_logger()
class ConnectionOperations(_ScopeDependentOperations):
"""ConnectionOperations.
You should not instantiate this class directly. Instead, you should
    create a PFClient instance that instantiates it for you and
attaches it as an attribute.
"""
def __init__(
self,
operation_scope: OperationScope,
operation_config: OperationConfig,
all_operations: OperationsContainer,
credential,
service_caller: FlowServiceCaller,
**kwargs: Dict,
):
super(ConnectionOperations, self).__init__(operation_scope, operation_config)
self._all_operations = all_operations
self._service_caller = service_caller
self._credential = credential
def create_or_update(self, connection, **kwargs):
rest_conn = connection._to_rest_object()
        # create or update the connection via the service caller
rest_conn_result = self._service_caller.create_connection(
subscription_id=self._operation_scope.subscription_id,
resource_group_name=self._operation_scope.resource_group_name,
workspace_name=self._operation_scope.workspace_name,
connection_name=connection.name,
body=rest_conn,
)
return _Connection._from_mt_rest_object(rest_conn_result)
def get(self, name, **kwargs):
rest_conn = self._service_caller.get_connection(
subscription_id=self._operation_scope.subscription_id,
resource_group_name=self._operation_scope.resource_group_name,
workspace_name=self._operation_scope.workspace_name,
connection_name=name,
**kwargs,
)
return _Connection._from_mt_rest_object(rest_conn)
def delete(self, name, **kwargs):
return self._service_caller.delete_connection(
subscription_id=self._operation_scope.subscription_id,
resource_group_name=self._operation_scope.resource_group_name,
workspace_name=self._operation_scope.workspace_name,
connection_name=name,
**kwargs,
)
def list(self, **kwargs):
rest_connections = self._service_caller.list_connections(
subscription_id=self._operation_scope.subscription_id,
resource_group_name=self._operation_scope.resource_group_name,
workspace_name=self._operation_scope.workspace_name,
**kwargs,
)
return safe_parse_object_list(
obj_list=rest_connections,
parser=_Connection._from_mt_rest_object,
message_generator=lambda x: f"Failed to load connection {x.connection_name}, skipped.",
)
def list_connection_specs(self, **kwargs):
results = self._service_caller.list_connection_specs(
subscription_id=self._operation_scope.subscription_id,
resource_group_name=self._operation_scope.resource_group_name,
workspace_name=self._operation_scope.workspace_name,
**kwargs,
)
return [WorkspaceConnectionSpec._from_rest_object(spec) for spec in results]
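# Example usage (a minimal sketch; `connection_ops` is the instance the azure
# PFClient constructs and attaches for you):
#   specs = connection_ops.list_connection_specs()
#   conn = connection_ops.get(name="my_azure_open_ai_connection")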
| promptflow/src/promptflow/promptflow/azure/operations/_connection_operations.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/azure/operations/_connection_operations.py",
"repo_id": "promptflow",
"token_count": 1476
} | 23 |
import docutils.nodes
from docutils.core import publish_doctree
class DocstringParser:
@staticmethod
def parse(docstring: str):
doctree = publish_doctree(docstring)
description = doctree[0].astext()
params = {}
for field in doctree.traverse(docutils.nodes.field):
field_name = field[0].astext()
field_body = field[1].astext()
if field_name.startswith("param"):
param_name = field_name.split(" ")[1]
if param_name not in params:
params[param_name] = {}
params[param_name]["description"] = field_body
if field_name.startswith("type"):
param_name = field_name.split(" ")[1]
if param_name not in params:
params[param_name] = {}
params[param_name]["type"] = field_body
return description, params
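# Example usage (a minimal sketch, assuming a Sphinx/reST-style docstring):
#   description, params = DocstringParser.parse(
#       "Say hello.\n\n:param name: The name to greet.\n:type name: str\n"
#   )
#   # description == "Say hello."
#   # params == {"name": {"description": "The name to greet.", "type": "str"}}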
| promptflow/src/promptflow/promptflow/executor/_docstring_parser.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/executor/_docstring_parser.py",
"repo_id": "promptflow",
"token_count": 447
} | 24 |
# Manage flows
:::{admonition} Experimental feature
This is an experimental feature, and may change at any time. Learn [more](../../how-to-guides/faq.md#stable-vs-experimental).
:::
This documentation will walk you through how to manage your flow with CLI and SDK on [Azure AI](https://learn.microsoft.com/en-us/azure/machine-learning/prompt-flow/overview-what-is-prompt-flow?view=azureml-api-2).
The flow examples in this guide come from [examples/flows/standard](https://github.com/microsoft/promptflow/tree/main/examples/flows/standard).
In general:
- For `CLI`, you can run `pfazure flow --help` in the terminal to see help messages.
- For `SDK`, you can refer to [Promptflow Python Library Reference](../../reference/python-library-reference/promptflow.md) and check `promptflow.azure.PFClient.flows` for more flow operations.
:::{admonition} Prerequisites
- Refer to the prerequisites in [Quick start](./quick-start/index.md#prerequisites).
- Use the `az login` command in the command line to log in. This enables promptflow to access your credentials.
:::
Let's take a look at the following topics:
- [Manage flows](#manage-flows)
- [Create a flow](#create-a-flow)
- [List flows](#list-flows)
## Create a flow
::::{tab-set}
:::{tab-item} CLI
:sync: CLI
To set the target workspace, you can either specify it in the CLI command or set a default value in the Azure CLI.
You can refer to [Quick start](./quick-start/index.md#submit-a-run-to-workspace) for more information.
To create a flow on Azure from a local flow directory, you can use
```bash
# create the flow
pfazure flow create --flow <path-to-flow-folder>
# create the flow with metadata
pfazure flow create --flow <path-to-flow-folder> --set display_name=<display-name> description=<description> tags.key1=value1
```
After the flow is created successfully, you can see the flow summary in the command line.
![img](../../media/cloud/manage-flows/flow_create_0.png)
:::
:::{tab-item} SDK
:sync: SDK
1. Import the required libraries
```python
from azure.identity import DefaultAzureCredential, InteractiveBrowserCredential
# azure version promptflow apis
from promptflow.azure import PFClient
```
2. Get credential
```python
try:
credential = DefaultAzureCredential()
# Check if given credential can get token successfully.
credential.get_token("https://management.azure.com/.default")
except Exception as ex:
# Fall back to InteractiveBrowserCredential in case DefaultAzureCredential not work
credential = InteractiveBrowserCredential()
```
3. Get a handle to the workspace
```python
# Get a handle to workspace
pf = PFClient(
credential=credential,
subscription_id="<SUBSCRIPTION_ID>", # this will look like xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
resource_group_name="<RESOURCE_GROUP>",
workspace_name="<AML_WORKSPACE_NAME>",
)
```
4. Create the flow
```python
# specify flow path
flow = "./web-classification"
# create flow to Azure
flow = pf.flows.create_or_update(
flow=flow, # path to the flow folder
display_name="my-web-classification", # it will be "web-classification-{timestamp}" if not specified
type="standard", # it will be "standard" if not specified
)
```
:::
::::
On Azure portal, you can see the created flow in the flow list.
![img](../../media/cloud/manage-flows/flow_create_1.png)
And the flow source folder on file share is `Users/<alias>/promptflow/<flow-display-name>`:
![img](../../media/cloud/manage-flows/flow_create_2.png)
Note that if the flow display name is not specified, it will default to the flow folder name + timestamp. (e.g. `web-classification-11-13-2023-14-19-10`)
## List flows
::::{tab-set}
:::{tab-item} CLI
:sync: CLI
List flows with default json format:
```bash
pfazure flow list --max-results 1
```
![img](../../media/cloud/manage-flows/flow_list_0.png)
:::
:::{tab-item} SDK
:sync: SDK
```python
# reuse the pf client created in "create a flow" section
flows = pf.flows.list(max_results=1)
```
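Each returned flow object carries basic metadata; a minimal sketch of inspecting it (the attribute names below are assumptions for illustration):
```python
for f in flows:
    print(f.name, f.type)
```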
:::
:::: | promptflow/docs/cloud/azureai/manage-flows.md/0 | {
"file_path": "promptflow/docs/cloud/azureai/manage-flows.md",
"repo_id": "promptflow",
"token_count": 1308
} | 0 |
# Deploy a flow using development server
:::{admonition} Experimental feature
This is an experimental feature, and may change at any time. Learn [more](../faq.md#stable-vs-experimental).
:::
Once you have created and thoroughly tested a flow, you can use it as an HTTP endpoint.
::::{tab-set}
:::{tab-item} CLI
:sync: CLI
We are going to use the [web-classification](https://github.com/microsoft/promptflow/tree/main/examples/flows/standard/web-classification/) as
an example to show how to deploy a flow.
Please ensure you have [created the connection](../manage-connections.md#create-a-connection) required by the flow; if not, you can
refer to [Setup connection for web-classification](https://github.com/microsoft/promptflow/tree/main/examples/flows/standard/web-classification).
Note: In serving mode, relevant environment variables (`{connection_name}_{key_name}`) are used to override connection
configurations, and white space in the connection name is removed from the environment variable name. For instance,
if there is a custom connection named 'custom_connection' with a configuration key called 'chat_deployment_name', the
serving process will attempt to retrieve 'chat_deployment_name' from the environment variable
'CUSTOM_CONNECTION_CHAT_DEPLOYMENT_NAME' by default. If the environment variable is not set, the original
value is used as a fallback.
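For example, to override that configuration before serving (the deployment name below is a hypothetical value for illustration):
```bash
# Override 'chat_deployment_name' of connection 'custom_connection' for this serving session
export CUSTOM_CONNECTION_CHAT_DEPLOYMENT_NAME=my-deployment
```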
The following CLI command allows you to serve a flow folder as an endpoint. Running it starts a [flask](https://flask.palletsprojects.com/en/) app in the environment where the command is executed, so please ensure all prerequisites required by the flow have been installed.
```bash
# Serve the flow at localhost:8080
pf flow serve --source <path-to-your-flow-folder> --port 8080 --host localhost
```
The expected result is as follows if the flow is served successfully; the process will stay alive until it is killed manually.
![img](../../media/how-to-guides/deploy_flow.png)
:::
:::{tab-item} VS Code Extension
:sync: VSC
In the visual editor, choose:
![img](../../media/how-to-guides/vscode_export.png)
then choose the format:
![img](../../media/how-to-guides/vscode_export_as_local_app.png)
then in the YAML editor:
![img](../../media/how-to-guides/vscode_start_local_app.png)
:::
::::
## Test endpoint
::::{tab-set}
:::{tab-item} Bash
You could open another terminal to test the endpoint with the following command:
```bash
curl http://localhost:8080/score --data '{"url":"https://play.google.com/store/apps/details?id=com.twitter.android"}' -X POST -H "Content-Type: application/json"
```
:::
:::{tab-item} PowerShell
You could open another terminal to test the endpoint with the following command:
```powershell
Invoke-WebRequest -URI http://localhost:8080/score -Body '{"url":"https://play.google.com/store/apps/details?id=com.twitter.android"}' -Method POST -ContentType "application/json"
```
:::
:::{tab-item} Test Page
The development server has a built-in web page you can use to test the flow. Open 'http://localhost:8080' in your browser.
![img](../../media/how-to-guides/deploy_flow_test_page.png)
:::
::::
## Next steps
- Try the example [here](https://github.com/microsoft/promptflow/tree/main/examples/flows/standard/web-classification/).
- See how to [deploy a flow using docker](deploy-using-docker.md).
- See how to [deploy a flow using kubernetes](deploy-using-kubernetes.md).
| promptflow/docs/how-to-guides/deploy-a-flow/deploy-using-dev-server.md/0 | {
"file_path": "promptflow/docs/how-to-guides/deploy-a-flow/deploy-using-dev-server.md",
"repo_id": "promptflow",
"token_count": 999
} | 1 |
# Customizing an LLM Tool
In this document, we will guide you through the process of customizing an LLM tool, allowing users to seamlessly connect to a large language model with prompt tuning experience using a `PromptTemplate`.
## Prerequisites
- Please ensure that your [Prompt flow for VS Code](https://marketplace.visualstudio.com/items?itemName=prompt-flow.prompt-flow) is updated to version 1.2.0 or later.
## How to customize an LLM tool
Here we use [an existing tool package](https://github.com/microsoft/promptflow/tree/main/examples/tools/tool-package-quickstart/my_tool_package) as an example. If you want to create your own tool, please refer to [create and use tool package](create-and-use-tool-package.md).
1. Develop the tool code as in [this example](https://github.com/microsoft/promptflow/blob/main/examples/tools/tool-package-quickstart/my_tool_package/tools/tool_with_custom_llm_type.py).
- Add a `CustomConnection` input to the tool, which is used to authenticate and establish a connection to the large language model.
- Add a `PromptTemplate` input to the tool, which serves as an argument to be passed into the large language model.
```python
from jinja2 import Template
from promptflow import tool
from promptflow.connections import CustomConnection
from promptflow.contracts.types import PromptTemplate
@tool
def my_tool(connection: CustomConnection, prompt: PromptTemplate, **kwargs) -> str:
# Customize your own code to use the connection and prompt here.
rendered_prompt = Template(prompt, trim_blocks=True, keep_trailing_newline=True).render(**kwargs)
return rendered_prompt
```
2. Generate the custom LLM tool YAML.
   Run the command below in your tool project directory to automatically generate your tool YAML; use _-t "custom_llm"_ or _--tool-type "custom_llm"_ to indicate this is a custom LLM tool:
```
python <promptflow github repo>\scripts\tool\generate_package_tool_meta.py -m <tool_module> -o <tool_yaml_path> -t "custom_llm"
```
Here we use [an existing tool](https://github.com/microsoft/promptflow/blob/main/examples/tools/tool-package-quickstart/my_tool_package/yamls/tool_with_custom_llm_type.yaml) as an example.
```
cd D:\proj\github\promptflow\examples\tools\tool-package-quickstart
python D:\proj\github\promptflow\scripts\tool\generate_package_tool_meta.py -m my_tool_package.tools.tool_with_custom_llm_type -o my_tool_package\yamls\tool_with_custom_llm_type.yaml -n "My Custom LLM Tool" -d "This is a tool to demonstrate how to customize an LLM tool with a PromptTemplate." -t "custom_llm"
```
This command will generate a YAML file as follows:
```yaml
my_tool_package.tools.tool_with_custom_llm_type.my_tool:
name: My Custom LLM Tool
description: This is a tool to demonstrate how to customize an LLM tool with a PromptTemplate.
# The type is custom_llm.
type: custom_llm
module: my_tool_package.tools.tool_with_custom_llm_type
function: my_tool
inputs:
connection:
type:
- CustomConnection
```
## Use the tool in VS Code
Follow the steps to [build and install your tool package](create-and-use-tool-package.md#build-and-share-the-tool-package) and [use your tool from VS Code extension](create-and-use-tool-package.md#use-your-tool-from-vscode-extension).
Here we use an existing flow to demonstrate the experience, open [this flow](https://github.com/microsoft/promptflow/blob/main/examples/tools/use-cases/custom_llm_tool_showcase/flow.dag.yaml) in VS Code extension.
- There is a node named "my_custom_llm_tool" with a prompt template file. You can either use an existing file or create a new one as the prompt template file.
![use_my_custom_llm_tool](../../media/how-to-guides/develop-a-tool/use_my_custom_llm_tool.png)
| promptflow/docs/how-to-guides/develop-a-tool/customize_an_llm_tool.md/0 | {
"file_path": "promptflow/docs/how-to-guides/develop-a-tool/customize_an_llm_tool.md",
"repo_id": "promptflow",
"token_count": 1277
} | 2 |
---
myst:
html_meta:
"description lang=en": "Prompt flow Doc"
"google-site-verification": "rEZN-2h5TVqEco07aaMpqNcDx4bjr2czx1Hwfoxydrg"
html_theme.sidebar_secondary.remove: true
---
# Prompt flow
[**Prompt flow**](https://github.com/microsoft/promptflow) is a suite of development tools designed to streamline the end-to-end development cycle of LLM-based AI applications, from ideation, prototyping, testing, evaluation to production deployment and monitoring. It makes prompt engineering much easier and enables you to build LLM apps with production quality.
With prompt flow, you will be able to:
- **Create [flows](./concepts/concept-flows.md)** that link [LLMs](./reference/tools-reference/llm-tool.md), [prompts](./reference/tools-reference/prompt-tool.md), [Python](./reference/tools-reference/python-tool.md) code and other [tools](./concepts/concept-tools.md) together in an executable workflow.
- **Debug and iterate your flows**, especially the interaction with LLMs, with ease.
- **Evaluate your flows**, calculate quality and performance metrics with larger datasets.
- **Integrate the testing and evaluation into your CI/CD system** to ensure the quality of your flow.
- **Deploy your flows** to the serving platform you choose or integrate into your app's code base easily.
- (Optional but highly recommended) **Collaborate with your team** by leveraging the cloud version of [Prompt flow in Azure AI](https://learn.microsoft.com/en-us/azure/machine-learning/prompt-flow/overview-what-is-prompt-flow?view=azureml-api-2).
> We welcome you to join us and make prompt flow better by
> participating in [discussions](https://github.com/microsoft/promptflow/discussions),
> opening [issues](https://github.com/microsoft/promptflow/issues/new/choose),
> submitting [PRs](https://github.com/microsoft/promptflow/pulls).
This documentation site contains guides for prompt flow [sdk, cli](https://pypi.org/project/promptflow/) and [vscode extension](https://marketplace.visualstudio.com/items?itemName=prompt-flow.prompt-flow) users.
```{gallery-grid}
:grid-columns: 1 2 2 2
- header: "🚀 Quick Start"
content: "
Quick start and end-to-end tutorials.<br/><br/>
- [Getting started with prompt flow](how-to-guides/quick-start.md)<br/>
- [E2E development tutorial: chat with PDF](https://github.com/microsoft/promptflow/blob/main/examples/tutorials/e2e-development/chat-with-pdf.md)<br/>
- Find more: [tutorials & samples](tutorials/index.md)<br/>
"
- header: "📒 How-to Guides"
content: "
  Articles that guide users through completing specific tasks in prompt flow.<br/><br/>
- [Develop a flow](how-to-guides/develop-a-flow/index.md)<br/>
- [Initialize and test a flow](how-to-guides/init-and-test-a-flow.md)<br/>
- [Run and evaluate a flow](how-to-guides/run-and-evaluate-a-flow/index.md)<br/>
- [Tune prompts using variants](how-to-guides/tune-prompts-with-variants.md)<br/>
- [Develop custom tool](how-to-guides/develop-a-tool/create-and-use-tool-package.md)<br/>
- [Deploy a flow](how-to-guides/deploy-a-flow/index.md)<br/>
- [Process image in flow](how-to-guides/process-image-in-flow.md)
"
```
```{gallery-grid}
:grid-columns: 1 2 2 2
- header: "📑 Concepts"
content: "
  Introduction to the key concepts of prompt flow.<br/><br/>
- [Flows](concepts/concept-flows.md)<br/>
- [Tools](concepts/concept-tools.md)<br/>
- [Connections](concepts/concept-connections.md)<br/>
- [Design principles](concepts/design-principles.md)<br/>
"
- header: "🔍 Reference"
content: "
  Reference provides technical information about the prompt flow API.<br/><br/>
- Command line Interface reference: [pf](reference/pf-command-reference.md)<br/>
- Python library reference: [promptflow](reference/python-library-reference/promptflow.md)<br/>
- Tool reference: [LLM Tool](reference/tools-reference/llm-tool.md), [Python Tool](reference/tools-reference/python-tool.md), [Prompt Tool](reference/tools-reference/prompt-tool.md)<br/>
"
```
```{toctree}
:hidden:
:maxdepth: 1
how-to-guides/quick-start
```
```{toctree}
:hidden:
:maxdepth: 1
how-to-guides/index
```
```{toctree}
:hidden:
:maxdepth: 1
tutorials/index
```
```{toctree}
:hidden:
:maxdepth: 2
concepts/index
```
```{toctree}
:hidden:
:maxdepth: 1
reference/index
```
```{toctree}
:hidden:
:maxdepth: 1
cloud/index
```
```{toctree}
:hidden:
:maxdepth: 1
integrations/index
``` | promptflow/docs/index.md/0 | {
"file_path": "promptflow/docs/index.md",
"repo_id": "promptflow",
"token_count": 1541
} | 3 |
# OpenAI GPT-4V
## Introduction
The OpenAI GPT-4V tool enables you to leverage OpenAI's GPT-4 with vision (also referred to as GPT-4V or gpt-4-vision-preview in the API) to take images as input and answer questions about them.
## Prerequisites
- Create OpenAI resources
  Sign up for an account on the [OpenAI website](https://openai.com/)
  Log in and [find your personal API key](https://platform.openai.com/account/api-keys)
- Get Access to GPT-4 API
  To use GPT-4 with vision, you need access to the GPT-4 API. Learn more about [How to get access to GPT-4 API](https://help.openai.com/en/articles/7102672-how-can-i-access-gpt-4)
## Connection
Set up connections to provisioned resources in prompt flow.
| Type | Name | API KEY |
|-------------|----------|----------|
| OpenAI | Required | Required |
## Inputs
| Name | Type | Description | Required |
|------------------------|-------------|------------------------------------------------------------------------------------------------|----------|
| connection | OpenAI | the OpenAI connection to be used in the tool | Yes |
| model                  | string      | the language model to use; currently only gpt-4-vision-preview is supported                     | Yes      |
| prompt                 | string      | The text prompt that the language model will use to generate its response.                      | Yes      |
| max\_tokens | integer | the maximum number of tokens to generate in the response. Default is 512. | No |
| temperature | float | the randomness of the generated text. Default is 1. | No |
| stop | list | the stopping sequence for the generated text. Default is null. | No |
| top_p | float | the probability of using the top choice from the generated tokens. Default is 1. | No |
| presence\_penalty | float | value that controls the model's behavior with regards to repeating phrases. Default is 0. | No |
| frequency\_penalty | float | value that controls the model's behavior with regards to generating rare phrases. Default is 0. | No |
## Outputs
| Return Type | Description |
|-------------|------------------------------------------|
| string      | The text of one response in the conversation |
| promptflow/docs/reference/tools-reference/openai-gpt-4v-tool.md/0 | {
"file_path": "promptflow/docs/reference/tools-reference/openai-gpt-4v-tool.md",
"repo_id": "promptflow",
"token_count": 1118
} | 4 |
from enum import Enum
from typing import Dict, List, Union
import json
import requests
from promptflow import tool, ToolProvider
from promptflow.connections import AzureContentSafetyConnection
from promptflow.tools.exception import AzureContentSafetyInputValueError, AzureContentSafetySystemError
class TextCategorySensitivity(str, Enum):
DISABLE = "disable"
LOW_SENSITIVITY = "low_sensitivity"
MEDIUM_SENSITIVITY = "medium_sensitivity"
HIGH_SENSITIVITY = "high_sensitivity"
class AzureContentSafety(ToolProvider):
"""
Doc reference :
https://review.learn.microsoft.com/en-us/azure/cognitive-services/content-safety/quickstart?branch=pr-en-us-233724&pivots=programming-language-rest
"""
def __init__(self, connection: AzureContentSafetyConnection):
self.connection = connection
super(AzureContentSafety, self).__init__()
@tool
def analyze_text(
self,
text: str,
hate_category: TextCategorySensitivity = TextCategorySensitivity.MEDIUM_SENSITIVITY,
sexual_category: TextCategorySensitivity = TextCategorySensitivity.MEDIUM_SENSITIVITY,
self_harm_category: TextCategorySensitivity = TextCategorySensitivity.MEDIUM_SENSITIVITY,
violence_category: TextCategorySensitivity = TextCategorySensitivity.MEDIUM_SENSITIVITY,
):
content_safety = ContentSafety(self.connection.endpoint, self.connection.api_key, self.connection.api_version)
media_type = MediaType.Text
blocklists = []
detection_result = content_safety.detect(media_type, text, blocklists)
# Set the reject thresholds for each category
reject_thresholds = {
Category.Hate: switch_category_threshold(hate_category),
Category.SelfHarm: switch_category_threshold(self_harm_category),
Category.Sexual: switch_category_threshold(sexual_category),
Category.Violence: switch_category_threshold(violence_category),
}
# Make a decision based on the detection result and reject thresholds
if self.connection.api_version == "2023-10-01":
decision_result = content_safety.make_decision_1001(detection_result, reject_thresholds)
else:
decision_result = content_safety.make_decision(detection_result, reject_thresholds)
return convert_decision_to_json(decision_result)
@tool
def analyze_text(
connection: AzureContentSafetyConnection,
text: str,
hate_category: TextCategorySensitivity = TextCategorySensitivity.MEDIUM_SENSITIVITY,
sexual_category: TextCategorySensitivity = TextCategorySensitivity.MEDIUM_SENSITIVITY,
self_harm_category: TextCategorySensitivity = TextCategorySensitivity.MEDIUM_SENSITIVITY,
violence_category: TextCategorySensitivity = TextCategorySensitivity.MEDIUM_SENSITIVITY,
):
return AzureContentSafety(connection).analyze_text(
text=text,
hate_category=hate_category,
sexual_category=sexual_category,
self_harm_category=self_harm_category,
violence_category=violence_category,
)
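# Example usage (a minimal sketch; connection values are placeholders):
#   conn = AzureContentSafetyConnection(
#       api_key="<api-key>",
#       endpoint="https://<resource>.cognitiveservices.azure.com",
#       api_version="2023-10-01",
#   )
#   result = analyze_text(conn, text="some user input")
#   # result => {"suggested_action": "Accept", "action_by_category": {...}}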
def switch_category_threshold(sensitivity: TextCategorySensitivity) -> int:
    switcher = {
        TextCategorySensitivity.DISABLE: -1,
        TextCategorySensitivity.LOW_SENSITIVITY: 6,
        TextCategorySensitivity.MEDIUM_SENSITIVITY: 4,
        TextCategorySensitivity.HIGH_SENSITIVITY: 2,
    }
    if sensitivity not in switcher:
        # Raise instead of returning an error string: the declared return type is int
        # and the value feeds the reject-threshold comparisons downstream.
        raise AzureContentSafetyInputValueError(message=f"Non-supported sensitivity: {sensitivity}")
    return switcher[sensitivity]
class MediaType(Enum):
Text = 1
Image = 2
class Category(Enum):
Hate = 1
SelfHarm = 2
Sexual = 3
Violence = 4
class Action(Enum):
Accept = "Accept"
Reject = "Reject"
class Decision(object):
def __init__(self, suggested_action: Action, action_by_category: Dict[Category, Action]) -> None:
"""
Represents the decision made by the content moderation system.
Args:
- suggested_action (Action): The suggested action to take.
- action_by_category (dict[Category, Action]): The action to take for each category.
"""
self.suggested_action = suggested_action
self.action_by_category = action_by_category
def convert_decision_to_json(decision: Decision):
result_json = {}
result_json["suggested_action"] = decision.suggested_action.value
category_json = {}
for key, value in decision.action_by_category.items():
category_json[key.name] = value.value
result_json["action_by_category"] = category_json
return result_json
class ContentSafety(object):
def __init__(self, endpoint: str, subscription_key: str, api_version: str) -> None:
"""
Creates a new ContentSafety instance.
Args:
- endpoint (str): The endpoint URL for the Content Safety API.
- subscription_key (str): The subscription key for the Content Safety API.
- api_version (str): The version of the Content Safety API to use.
"""
self.endpoint = endpoint
self.subscription_key = subscription_key
self.api_version = api_version
def build_url(self, media_type: MediaType) -> str:
"""
Builds the URL for the Content Safety API based on the media type.
Args:
- media_type (MediaType): The type of media to analyze.
Returns:
- str: The URL for the Content Safety API.
"""
if media_type == MediaType.Text:
return f"{self.endpoint}/contentsafety/text:analyze?api-version={self.api_version}"
elif media_type == MediaType.Image:
return f"{self.endpoint}/contentsafety/image:analyze?api-version={self.api_version}"
else:
error_message = f"Invalid Media Type {media_type}"
raise AzureContentSafetyInputValueError(message=error_message)
def build_headers(self) -> Dict[str, str]:
"""
Builds the headers for the Content Safety API request.
Returns:
- dict[str, str]: The headers for the Content Safety API request.
"""
return {
"Ocp-Apim-Subscription-Key": self.subscription_key,
"Content-Type": "application/json",
"ms-azure-ai-sender": "prompt_flow"
}
def build_request_body(
self,
media_type: MediaType,
content: str,
blocklists: List[str],
) -> dict:
"""
Builds the request body for the Content Safety API request.
Args:
- media_type (MediaType): The type of media to analyze.
- content (str): The content to analyze.
- blocklists (list[str]): The blocklists to use for text analysis.
Returns:
- dict: The request body for the Content Safety API request.
"""
if media_type == MediaType.Text:
return {
"text": content,
"blocklistNames": blocklists,
}
elif media_type == MediaType.Image:
return {"image": {"content": content}}
else:
error_message = f"Invalid Media Type {media_type}"
raise AzureContentSafetyInputValueError(message=error_message)
def detect(
self,
media_type: MediaType,
content: str,
blocklists: List[str] = [],
) -> dict:
url = self.build_url(media_type)
headers = self.build_headers()
request_body = self.build_request_body(media_type, content, blocklists)
payload = json.dumps(request_body)
response = requests.post(url, headers=headers, data=payload)
print("status code: " + response.status_code.__str__())
print("response txt: " + response.text)
res_content = response.json()
if response.status_code != 200:
error_message = f"Error in detecting content: {res_content['error']['message']}"
raise AzureContentSafetySystemError(message=error_message)
return res_content
def get_detect_result_by_category(self, category: Category, detect_result: dict) -> Union[int, None]:
if category == Category.Hate:
return detect_result.get("hateResult", None)
elif category == Category.SelfHarm:
return detect_result.get("selfHarmResult", None)
elif category == Category.Sexual:
return detect_result.get("sexualResult", None)
elif category == Category.Violence:
return detect_result.get("violenceResult", None)
else:
error_message = f"Invalid Category {category}"
raise AzureContentSafetyInputValueError(message=error_message)
def get_detect_result_by_category_1001(self, category: Category, detect_result: dict) -> Union[int, None]:
category_res = detect_result.get("categoriesAnalysis", None)
for res in category_res:
if category.name == res.get("category", None):
return res
error_message = f"Invalid Category {category}"
raise AzureContentSafetyInputValueError(message=error_message)
def make_decision(
self,
detection_result: dict,
reject_thresholds: Dict[Category, int],
) -> Decision:
action_result = {}
final_action = Action.Accept
for category, threshold in reject_thresholds.items():
if threshold not in (-1, 0, 2, 4, 6):
error_message = "RejectThreshold can only be in (-1, 0, 2, 4, 6)"
raise AzureContentSafetyInputValueError(message=error_message)
cate_detect_res = self.get_detect_result_by_category(category, detection_result)
if cate_detect_res is None or "severity" not in cate_detect_res:
error_message = f"Can not find detection result for {category}"
raise AzureContentSafetySystemError(message=error_message)
severity = cate_detect_res["severity"]
action = Action.Reject if threshold != -1 and severity >= threshold else Action.Accept
action_result[category] = action
if action.value > final_action.value:
final_action = action
if (
"blocklistsMatchResults" in detection_result
and detection_result["blocklistsMatchResults"]
and len(detection_result["blocklistsMatchResults"]) > 0
):
final_action = Action.Reject
print(f"Action result: {action_result}")
return Decision(final_action, action_result)
def make_decision_1001(
self,
detection_result: dict,
reject_thresholds: Dict[Category, int],
) -> Decision:
action_result = {}
final_action = Action.Accept
for category, threshold in reject_thresholds.items():
if threshold not in (-1, 0, 2, 4, 6):
error_message = "RejectThreshold can only be in (-1, 0, 2, 4, 6)"
raise AzureContentSafetyInputValueError(message=error_message)
cate_detect_res = self.get_detect_result_by_category_1001(
category, detection_result
)
if cate_detect_res is None or "severity" not in cate_detect_res:
error_message = f"Can not find detection result for {category}"
raise AzureContentSafetySystemError(message=error_message)
severity = cate_detect_res["severity"]
action = (
Action.Reject
if threshold != -1 and severity >= threshold
else Action.Accept
)
action_result[category] = action
if action.value > final_action.value:
final_action = action
if (
"blocklistsMatch" in detection_result
and detection_result["blocklistsMatch"]
and len(detection_result["blocklistsMatch"]) > 0
):
final_action = Action.Reject
print(f"Action result: {action_result}")
return Decision(final_action, action_result)
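# Minimal usage sketch (connection parameter names and values below are
# illustrative placeholders, not real credentials):
#   conn = AzureContentSafetyConnection(
#       api_key="<your-key>",
#       endpoint="https://<your-resource>.cognitiveservices.azure.com",
#       api_version="2023-10-01",
#   )
#   result = analyze_text(connection=conn, text="Text to screen.")
#   # result is a JSON-serializable dict, e.g.
#   # {"suggested_action": "Accept", "action_by_category": {"Hate": "Accept", ...}}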
| promptflow/src/promptflow-tools/promptflow/tools/azure_content_safety.py/0 | {
"file_path": "promptflow/src/promptflow-tools/promptflow/tools/azure_content_safety.py",
"repo_id": "promptflow",
"token_count": 4937
} | 5 |
import argparse
import json
from promptflow._cli._params import add_param_set_positional, base_params
from promptflow._cli._utils import activate_action, list_of_dict_to_dict
from promptflow._sdk._configuration import Configuration, InvalidConfigValue
from promptflow._sdk._utils import print_red_error
from promptflow._utils.logger_utils import get_cli_sdk_logger
logger = get_cli_sdk_logger()
def add_config_set(subparsers):
epilog = """
Examples:
# Config connection provider to azure workspace for current user:
pf config set connection.provider="azureml://subscriptions/<your-subscription>/resourceGroups/<your-resourcegroup>/providers/Microsoft.MachineLearningServices/workspaces/<your-workspace>"
""" # noqa: E501
activate_action(
name="set",
description="Set prompt flow configs for current user.",
epilog=epilog,
add_params=[add_param_set_positional] + base_params,
subparsers=subparsers,
help_message="Set prompt flow configs for current user, configs will be stored at ~/.promptflow/pf.yaml.",
action_param_name="sub_action",
)
def add_config_show(subparsers):
epilog = """
Examples:
    # Show prompt flow configs for current user:
pf config show
"""
activate_action(
name="show",
description="Show prompt flow configs for current user.",
epilog=epilog,
add_params=base_params,
subparsers=subparsers,
help_message="Show prompt flow configs for current user.",
action_param_name="sub_action",
)
def add_config_parser(subparsers):
config_parser = subparsers.add_parser(
"config", description="A CLI tool to set prompt flow configs for current user.", help="pf config"
)
subparsers = config_parser.add_subparsers()
add_config_set(subparsers)
add_config_show(subparsers)
config_parser.set_defaults(action="config")
def dispatch_config_commands(args: argparse.Namespace):
if args.sub_action == "set":
set_config(args)
if args.sub_action == "show":
show_config()
def set_config(args):
params_override = list_of_dict_to_dict(args.params_override)
for k, v in params_override.items():
logger.debug("Setting config %s to %s", k, v)
try:
Configuration.get_instance().set_config(k, v)
print(f"Set config {args.params_override} successfully.")
except InvalidConfigValue as e:
error_message = f"Invalid config value {v!r} for {k!r}: {str(e)}"
print_red_error(error_message)
def show_config():
configs = Configuration.get_instance().get_all()
print(json.dumps(configs, indent=4))
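# Illustrative CLI session (values and output shapes are approximate):
#   $ pf config set connection.provider=local
#   Set config [{'connection.provider': 'local'}] successfully.
#   $ pf config show
#   {
#       "connection.provider": "local"
#   }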
| promptflow/src/promptflow/promptflow/_cli/_pf/_config.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_cli/_pf/_config.py",
"repo_id": "promptflow",
"token_count": 1038
} | 6 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from promptflow._version import VERSION
USER_AGENT = "{}/{}".format("promptflow-cli", VERSION)
| promptflow/src/promptflow/promptflow/_cli/_user_agent.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_cli/_user_agent.py",
"repo_id": "promptflow",
"token_count": 56
} | 7 |
$schema: https://azuremlschemas.azureedge.net/promptflow/latest/Flow.schema.json
inputs:
groundtruth:
type: string
prediction:
type: string
outputs:
results:
type: string
reference: ${line_process.output}
nodes:
- name: line_process
type: python
source:
type: code
path: line_process.py
inputs:
groundtruth: ${inputs.groundtruth}
prediction: ${inputs.prediction}
- name: aggregate
type: python
source:
type: code
path: aggregate.py
inputs:
processed_results: ${line_process.output}
aggregation: true
environment:
python_requirements_txt: requirements.txt
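# Note: ${...} expressions bind values at runtime, e.g. ${inputs.groundtruth}
# reads a flow input and ${line_process.output} consumes another node's output.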
| promptflow/src/promptflow/promptflow/_cli/data/evaluation_flow/flow.dag.yaml/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_cli/data/evaluation_flow/flow.dag.yaml",
"repo_id": "promptflow",
"token_count": 225
} | 8 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import hashlib
import json
from dataclasses import dataclass
from typing import Callable, List
from promptflow._utils.logger_utils import flow_logger
from promptflow.contracts.run_info import RunInfo
from promptflow.storage import AbstractCacheStorage, AbstractRunStorage
PROMPTFLOW_HASH_ATTR = "__promptflow_hash_func"
def get_calculate_cache_func(tool_func):
return getattr(tool_func, PROMPTFLOW_HASH_ATTR, None)
def set_calculate_cache_func(tool_func, calculate_cache_func):
setattr(tool_func, PROMPTFLOW_HASH_ATTR, calculate_cache_func)
def enable_cache(calculate_cache_func):
def decorator_enable_cache(func):
set_calculate_cache_func(func, calculate_cache_func)
return func
return decorator_enable_cache
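# Illustrative usage (the tool and cache-key functions are hypothetical):
#   def _cache_key(prompt: str) -> str:
#       return prompt
#   @enable_cache(_cache_key)
#   def my_tool(prompt: str) -> str:
#       return prompt.upper()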
@dataclass
class CacheInfo:
hash_id: str = None
cache_string: str = None
@dataclass
class CacheResult:
result: object = None
cached_run_id: str = None
cached_flow_run_id: str = None
hit_cache: bool = False
class AbstractCacheManager:
@staticmethod
def init_from_env() -> "AbstractCacheManager":
# TODO: Return CacheManager after local execution is enabled.
return DummyCacheManager()
def calculate_cache_info(self, flow_id: str, tool_method: Callable, args, kwargs) -> CacheInfo:
raise NotImplementedError("AbstractCacheManager has not implemented method calculate_cache_info.")
def get_cache_result(self, cache_info: CacheInfo) -> CacheResult:
raise NotImplementedError("AbstractCacheManager has not implemented method get_cache_result.")
    def persist_result(self, run_info: RunInfo, cache_info: CacheInfo, flow_id: str):
        raise NotImplementedError("AbstractCacheManager has not implemented method persist_result.")
class DummyCacheManager(AbstractCacheManager):
def __init__(self):
pass
def calculate_cache_info(self, flow_id: str, tool_method: Callable, args, kwargs) -> CacheInfo:
return None
def get_cache_result(self, cache_info: CacheInfo) -> CacheResult:
return None
    def persist_result(self, run_info: RunInfo, cache_info: CacheInfo, flow_id: str):
        pass
class CacheManager(AbstractCacheManager):
def __init__(self, run_storage: AbstractRunStorage, cache_storage: AbstractCacheStorage):
self._run_storage = run_storage
self._cache_storage = cache_storage
def calculate_cache_info(self, flow_id: str, tool_method: Callable, args, kwargs) -> CacheInfo:
cache_function = get_calculate_cache_func(tool_method)
# Cache function is not registered with this tool.
if cache_function is None:
return None
# Calculate cache string and hash id.
try:
cache_string = cache_function(*args, **kwargs)
except Exception as ex:
flow_logger.warning(f"Failed to calculate cache string. Exception: {ex}")
return None
# Add flow_id and tool_name in the cache string.
# So that different flow_id and tool_name cannot reuse.
other_cache_string = json.dumps(
{
"flow_id": flow_id,
"tool_name": tool_method.__qualname__,
}
)
cache_string += other_cache_string
hash_id = self._calculate_hash_id(cache_string)
return CacheInfo(hash_id=hash_id, cache_string=cache_string)
def get_cache_result(self, cache_info: CacheInfo) -> CacheResult:
hash_id = cache_info.hash_id
        # Query cache records for this hash_id; each record carries run_id and end_time.
        cache_result_list = self._cache_storage.get_cache_record_list(hash_id=hash_id)
if len(cache_result_list) == 0:
return None
# Get the latest cache result.
cache_result = sorted(cache_result_list, reverse=True, key=lambda i: i.end_time)[0]
try:
cached_run_info = self._run_storage.get_node_run(cache_result.run_id)
except Exception as ex:
            flow_logger.warning(
                f"Failed to get cached run result for run id: {cache_result.run_id}. \
                Exception: {ex}"
            )
return None
flow_logger.info(
f"Hit cached result of previous run: run id: \
{cached_run_info.run_id}, flow run id: {cached_run_info.flow_run_id}"
)
return CacheResult(
result=cached_run_info.result,
cached_run_id=cached_run_info.run_id,
cached_flow_run_id=cached_run_info.flow_run_id,
hit_cache=True,
)
def persist_result(self, run_info: RunInfo, cache_info: CacheInfo, flow_id: str):
self._cache_storage.persist_cache_result(run_info, cache_info.hash_id, cache_info.cache_string, flow_id)
@staticmethod
def _calculate_hash_id(cache_string: str):
return hashlib.sha1(cache_string.encode("utf-8")).hexdigest()
| promptflow/src/promptflow/promptflow/_core/cache_manager.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_core/cache_manager.py",
"repo_id": "promptflow",
"token_count": 2075
} | 9 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from os import PathLike
from typing import IO, AnyStr, Union
from promptflow._sdk._load_functions import load_run
from promptflow._sdk._pf_client import PFClient
from promptflow._sdk.entities._run import Run
def _create_run(run: Run, **kwargs):
client = PFClient()
return client.runs.create_or_update(run=run, **kwargs)
def create_yaml_run(source: Union[str, PathLike, IO[AnyStr]], params_override: list = None, **kwargs):
"""Create a run from a yaml file. Should only call from CLI."""
run = load_run(source, params_override=params_override, **kwargs)
return _create_run(run=run, **kwargs)
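# Illustrative usage (file path and override values are placeholders):
#   run = create_yaml_run("run.yaml", params_override=[{"display_name": "my-run"}])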
| promptflow/src/promptflow/promptflow/_sdk/_run_functions.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_sdk/_run_functions.py",
"repo_id": "promptflow",
"token_count": 236
} | 10 |
<!DOCTYPE html>
<html>
<head>
<title>Trace Visualization</title>
<style>
table, th, td {
border: 1px solid black;
border-collapse: collapse;
}
th, td {
padding: 5px;
text-align: left;
}
</style>
</head>
<body>
<button onclick="copyToClipboard()">Copy Content</button><br>
Line Runs
<table>
<thead>
<tr>
<th>trace_id</th>
<th>span_id</th>
<th>content</th>
</tr>
</thead>
<tbody>
{% for row in summary %}
<tr>
<td>{{ row.trace_id }}</td>
<td>{{ row.span_id }}</td>
<td><pre>{{ row.content }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
Traces
<table>
<thead>
<tr>
<th>trace_id</th>
<th>span_id</th>
<th>content</th>
</tr>
</thead>
<tbody>
{% for row in traces %}
<tr>
<td>{{ row.trace_id }}</td>
<td>{{ row.span_id }}</td>
<td><pre>{{ row.content }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
Evaluation Traces
<table>
<thead>
<tr>
<th>trace_id</th>
<th>span_id</th>
<th>content</th>
</tr>
</thead>
<tbody>
{% for row in eval_traces %}
<tr>
<td>{{ row.trace_id }}</td>
<td>{{ row.span_id }}</td>
<td><pre>{{ row.content }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
<div id="contentToCopy">{{ trace_ui_dict }}</div>
<script>
function copyToClipboard() {
// Get the text from the div
var content = document.getElementById('contentToCopy').innerText;
// Create a temporary textarea element to hold the text to copy
var tempElement = document.createElement('textarea');
tempElement.value = content;
document.body.appendChild(tempElement);
tempElement.select(); // Select the text inside the textarea
document.execCommand('copy'); // Execute the copy command
document.body.removeChild(tempElement); // Remove the temporary element
// Optionally, alert the user that the text has been copied
alert('Content copied to clipboard!');
}
</script>
</body>
</html>
| promptflow/src/promptflow/promptflow/_sdk/_service/templates/ui_traces.html/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_sdk/_service/templates/ui_traces.html",
"repo_id": "promptflow",
"token_count": 1509
} | 11 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
class FlowDataCollector:
"""FlowDataCollector is used to collect flow data via MDC for monitoring."""
def __init__(self, logger):
self.logger = logger
self._init_success = self._init_data_collector()
logger.info(f"Mdc init status: {self._init_success}")
def _init_data_collector(self) -> bool:
"""init data collector."""
self.logger.info("Init mdc...")
try:
from azureml.ai.monitoring import Collector
self.inputs_collector = Collector(name="model_inputs")
self.outputs_collector = Collector(name="model_outputs")
return True
        except ImportError as e:
            self.logger.warning(f"Load mdc related module failed: {e}")
            return False
        except Exception as e:
            self.logger.warning(f"Init mdc failed: {e}")
            return False
def collect_flow_data(self, input: dict, output: dict, req_id: str = None, client_req_id: str = None):
"""collect flow data via MDC for monitoring."""
if not self._init_success:
return
try:
import pandas as pd
from azureml.ai.monitoring.context import BasicCorrelationContext
# build context
ctx = BasicCorrelationContext(id=req_id)
# collect inputs
coll_input = {k: [v] for k, v in input.items()}
input_df = pd.DataFrame(coll_input)
self.inputs_collector.collect(input_df, ctx)
# collect outputs
coll_output = {k: [v] for k, v in output.items()}
output_df = pd.DataFrame(coll_output)
# collect outputs data, pass in correlation_context so inputs and outputs data can be correlated later
self.outputs_collector.collect(output_df, ctx)
        except ImportError as e:
            self.logger.warning(f"Load mdc related module failed: {e}")
        except Exception as e:
            self.logger.warning(f"Collect flow data failed: {e}")
| promptflow/src/promptflow/promptflow/_sdk/_serving/monitor/data_collector.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_sdk/_serving/monitor/data_collector.py",
"repo_id": "promptflow",
"token_count": 912
} | 12 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import logging
import os
import platform
import sys
from opencensus.ext.azure.log_exporter import AzureEventHandler
from promptflow._sdk._configuration import Configuration
# promptflow-sdk in east us
INSTRUMENTATION_KEY = "8b52b368-4c91-4226-b7f7-be52822f0509"
# cspell:ignore overriden
def get_appinsights_log_handler():
"""
Enable the OpenCensus logging handler for specified logger and instrumentation key to send info to AppInsights.
"""
from promptflow._sdk._telemetry.telemetry import is_telemetry_enabled
try:
config = Configuration.get_instance()
instrumentation_key = INSTRUMENTATION_KEY
custom_properties = {
"python_version": platform.python_version(),
"installation_id": config.get_or_set_installation_id(),
}
handler = PromptFlowSDKLogHandler(
connection_string=f"InstrumentationKey={instrumentation_key}",
custom_properties=custom_properties,
enable_telemetry=is_telemetry_enabled(),
)
return handler
except Exception: # pylint: disable=broad-except
# ignore any exceptions, telemetry collection errors shouldn't block an operation
return logging.NullHandler()
def get_scrubbed_cloud_role():
"""Create cloud role for telemetry, will scrub user script name and only leave extension."""
default = "Unknown Application"
known_scripts = [
"pfs",
"pfutil.py",
"pf",
"pfazure",
"pf.exe",
"pfazure.exe",
"app.py",
"python -m unittest",
"pytest",
"gunicorn",
"ipykernel_launcher.py",
"jupyter-notebook",
"jupyter-lab",
"python",
"_jb_pytest_runner.py",
default,
]
try:
cloud_role = os.path.basename(sys.argv[0]) or default
if cloud_role not in known_scripts:
ext = os.path.splitext(cloud_role)[1]
cloud_role = "***" + ext
except Exception:
# fallback to default cloud role if failed to scrub
cloud_role = default
return cloud_role
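# Example: known entry points such as "pf" or "pytest" are reported verbatim,
# while an arbitrary user script like "my_script.py" is scrubbed to "***.py".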
# cspell:ignore AzureMLSDKLogHandler
class PromptFlowSDKLogHandler(AzureEventHandler):
"""Customized AzureLogHandler for PromptFlow SDK"""
def __init__(self, custom_properties, enable_telemetry, **kwargs):
super().__init__(**kwargs)
        # disable AzureEventHandler's logging to avoid warnings affecting the user experience
self.disable_telemetry_logger()
self._is_telemetry_enabled = enable_telemetry
self._custom_dimensions = custom_properties
def _check_stats_collection(self):
# skip checking stats collection since it's time-consuming
# according to doc: https://learn.microsoft.com/en-us/azure/azure-monitor/app/statsbeat
# it doesn't affect customers' overall monitoring volume
return False
def emit(self, record):
# skip logging if telemetry is disabled
if not self._is_telemetry_enabled:
return
try:
self._queue.put(record, block=False)
# log the record immediately if it is an error
if record.exc_info and not all(item is None for item in record.exc_info):
self._queue.flush()
except Exception: # pylint: disable=broad-except
# ignore any exceptions, telemetry collection errors shouldn't block an operation
return
def log_record_to_envelope(self, record):
from promptflow._utils.utils import is_in_ci_pipeline
# skip logging if telemetry is disabled
if not self._is_telemetry_enabled:
return
custom_dimensions = {
"level": record.levelname,
# add to distinguish if the log is from ci pipeline
"from_ci": is_in_ci_pipeline(),
}
custom_dimensions.update(self._custom_dimensions)
if hasattr(record, "custom_dimensions") and isinstance(record.custom_dimensions, dict):
record.custom_dimensions.update(custom_dimensions)
else:
record.custom_dimensions = custom_dimensions
envelope = super().log_record_to_envelope(record=record)
# scrub data before sending to appinsights
role = get_scrubbed_cloud_role()
envelope.tags["ai.cloud.role"] = role
envelope.tags.pop("ai.cloud.roleInstance", None)
envelope.tags.pop("ai.device.id", None)
return envelope
@classmethod
def disable_telemetry_logger(cls):
"""Disable AzureEventHandler's logging to avoid warning affect user experience"""
from opencensus.ext.azure.common.processor import logger as processor_logger
from opencensus.ext.azure.common.storage import logger as storage_logger
from opencensus.ext.azure.common.transport import logger as transport_logger
processor_logger.setLevel(logging.CRITICAL)
transport_logger.setLevel(logging.CRITICAL)
storage_logger.setLevel(logging.CRITICAL)
| promptflow/src/promptflow/promptflow/_sdk/_telemetry/logging_handler.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_sdk/_telemetry/logging_handler.py",
"repo_id": "promptflow",
"token_count": 2042
} | 13 |
#!/bin/bash
echo "$(date -uIns) - promptflow-serve/finish $@"
echo "$(date -uIns) - Stopped all Gunicorn processes" | promptflow/src/promptflow/promptflow/_sdk/data/docker_csharp/runit/promptflow-serve/finish.jinja2/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_sdk/data/docker_csharp/runit/promptflow-serve/finish.jinja2",
"repo_id": "promptflow",
"token_count": 45
} | 14 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import abc
import json
from os import PathLike
from pathlib import Path
from typing import Dict, Optional, Tuple, Union
from marshmallow import Schema
from promptflow._constants import LANGUAGE_KEY, FlowLanguage
from promptflow._sdk._constants import (
BASE_PATH_CONTEXT_KEY,
DAG_FILE_NAME,
DEFAULT_ENCODING,
FLOW_TOOLS_JSON,
PROMPT_FLOW_DIR_NAME,
)
from promptflow._sdk.entities._connection import _Connection
from promptflow._sdk.entities._validation import SchemaValidatableMixin
from promptflow._utils.flow_utils import resolve_flow_path
from promptflow._utils.logger_utils import get_cli_sdk_logger
from promptflow._utils.yaml_utils import load_yaml, load_yaml_string
from promptflow.exceptions import ErrorTarget, UserErrorException
logger = get_cli_sdk_logger()
class FlowContext:
"""Flow context entity. the settings on this context will be applied to the flow when executing.
:param connections: Connections for the flow.
:type connections: Optional[Dict[str, Dict]]
:param variant: Variant of the flow.
:type variant: Optional[str]
    :param overrides: Overrides of the flow.
    :type overrides: Optional[Dict[str, Dict]]
    :param streaming: Whether the flow's output needs to be returned in streaming mode.
    :type streaming: Optional[bool]
"""
def __init__(
self,
*,
connections=None,
variant=None,
overrides=None,
streaming=None,
):
self.connections, self._connection_objs = connections or {}, {}
self.variant = variant
self.overrides = overrides or {}
self.streaming = streaming
# TODO: introduce connection provider support
def _resolve_connections(self):
# resolve connections and create placeholder for connection objects
for _, v in self.connections.items():
if isinstance(v, dict):
for k, conn in v.items():
if isinstance(conn, _Connection):
name = self._get_connection_obj_name(conn)
v[k] = name
self._connection_objs[name] = conn
@classmethod
def _get_connection_obj_name(cls, connection: _Connection):
# create a unique connection name for connection obj
# will generate same name if connection has same content
connection_dict = connection._to_dict()
connection_name = f"connection_{hash(json.dumps(connection_dict, sort_keys=True))}"
return connection_name
def _to_dict(self):
return {
"connections": self.connections,
"variant": self.variant,
"overrides": self.overrides,
"streaming": self.streaming,
}
def __eq__(self, other):
if isinstance(other, FlowContext):
return self._to_dict() == other._to_dict()
return False
def __hash__(self):
self._resolve_connections()
return hash(json.dumps(self._to_dict(), sort_keys=True))
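# Illustrative context override (flow path, node and variant names are placeholders):
#   flow = Flow.load("path/to/flow")
#   flow.context = FlowContext(
#       variant="${node_name.variant_name}",
#       overrides={"nodes.node_name.inputs.temperature": 0.2},
#       streaming=False,
#   )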
class FlowBase(abc.ABC):
def __init__(self, *, data: dict, code: Path, path: Path, **kwargs):
self._context = FlowContext()
# flow.dag.yaml's content if provided
self._data = data
# working directory of the flow
self._code = Path(code).resolve()
# flow file path, can be script file or flow definition YAML file
self._path = Path(path).resolve()
# hash of flow's entry file, used to skip invoke if entry file is not changed
self._content_hash = kwargs.pop("content_hash", None)
super().__init__(**kwargs)
@property
def context(self) -> FlowContext:
return self._context
@context.setter
def context(self, val):
if not isinstance(val, FlowContext):
raise UserErrorException("context must be a FlowContext object, got {type(val)} instead.")
self._context = val
@property
def code(self) -> Path:
"""Working directory of the flow."""
return self._code
@property
def path(self) -> Path:
"""Flow file path. Can be script file or flow definition YAML file."""
return self._path
@property
def language(self) -> str:
"""Language of the flow."""
return self._data.get(LANGUAGE_KEY, FlowLanguage.Python)
@property
def additional_includes(self) -> list:
"""Additional includes of the flow."""
return self._data.get("additional_includes", [])
@classmethod
# pylint: disable=unused-argument
def _resolve_cls_and_type(cls, data, params_override):
"""Resolve the class to use for deserializing the data. Return current class if no override is provided.
:param data: Data to deserialize.
:type data: dict
:param params_override: Parameters to override, defaults to None
:type params_override: typing.Optional[list]
:return: Class to use for deserializing the data & its "type". Type will be None if no override is provided.
:rtype: tuple[class, typing.Optional[str]]
"""
return cls, "flow"
class Flow(FlowBase):
"""This class is used to represent a flow."""
def __init__(
self,
code: Union[str, PathLike],
path: Union[str, PathLike],
dag: dict,
**kwargs,
):
self.variant = kwargs.pop("variant", None) or {}
super().__init__(data=dag, code=code, path=path, **kwargs)
@classmethod
def _is_eager_flow(cls, data: dict):
"""Check if the flow is an eager flow. Use field 'entry' to determine."""
# If entry specified, it's an eager flow.
return data.get("entry")
@classmethod
def load(
cls,
source: Union[str, PathLike],
entry: str = None,
**kwargs,
):
from promptflow._sdk.entities._eager_flow import EagerFlow
source_path = Path(source)
if not source_path.exists():
raise UserErrorException(f"Source {source_path.absolute().as_posix()} does not exist")
flow_path = resolve_flow_path(source_path)
if not flow_path.exists():
raise UserErrorException(f"Flow file {flow_path.absolute().as_posix()} does not exist")
if flow_path.suffix in [".yaml", ".yml"]:
# read flow file to get hash
with open(flow_path, "r", encoding=DEFAULT_ENCODING) as f:
flow_content = f.read()
data = load_yaml_string(flow_content)
content_hash = hash(flow_content)
is_eager_flow = cls._is_eager_flow(data)
if is_eager_flow:
return EagerFlow._load(path=flow_path, data=data, **kwargs)
else:
# TODO: schema validation and warning on unknown fields
return ProtectedFlow._load(path=flow_path, dag=data, content_hash=content_hash, **kwargs)
# if non-YAML file is provided, raise user error exception
raise UserErrorException("Source must be a directory or a 'flow.dag.yaml' file")
def _init_executable(self, tuning_node=None, variant=None):
from promptflow._sdk._submitter import variant_overwrite_context
# TODO: check if there is potential bug here
        # this is a little weird:
        # 1. the executable is created from a temp folder when there are additional includes
        # 2. after the executable is returned, the temp folder is deleted
with variant_overwrite_context(self, tuning_node, variant) as flow:
from promptflow.contracts.flow import Flow as ExecutableFlow
return ExecutableFlow.from_yaml(flow_file=flow.path, working_dir=flow.code)
def __eq__(self, other):
if isinstance(other, Flow):
return self._content_hash == other._content_hash and self.context == other.context
return False
def __hash__(self):
return hash(self.context) ^ self._content_hash
class ProtectedFlow(Flow, SchemaValidatableMixin):
"""This class is used to hide internal interfaces from user.
User interface should be carefully designed to avoid breaking changes, while developers may need to change internal
interfaces to improve the code quality. On the other hand, making all internal interfaces private will make it
strange to use them everywhere inside this package.
Ideally, developers should always initialize ProtectedFlow object instead of Flow object.
"""
def __init__(
self,
path: Path,
code: Path,
dag: dict,
params_override: Optional[Dict] = None,
**kwargs,
):
super().__init__(path=path, code=code, dag=dag, **kwargs)
self._flow_dir, self._dag_file_name = self._get_flow_definition(self.code)
self._executable = None
self._params_override = params_override
@classmethod
def _load(cls, path: Path, dag: dict, **kwargs):
return cls(path=path, code=path.parent, dag=dag, **kwargs)
@property
def flow_dag_path(self) -> Path:
return self._flow_dir / self._dag_file_name
@property
def name(self) -> str:
return self._flow_dir.name
@property
def display_name(self) -> str:
return self._data.get("display_name", self.name)
@property
def tools_meta_path(self) -> Path:
target_path = self._flow_dir / PROMPT_FLOW_DIR_NAME / FLOW_TOOLS_JSON
target_path.parent.mkdir(parents=True, exist_ok=True)
return target_path
@classmethod
def _get_flow_definition(cls, flow, base_path=None) -> Tuple[Path, str]:
if base_path:
flow_path = Path(base_path) / flow
else:
flow_path = Path(flow)
if flow_path.is_dir() and (flow_path / DAG_FILE_NAME).is_file():
return flow_path, DAG_FILE_NAME
elif flow_path.is_file():
return flow_path.parent, flow_path.name
raise ValueError(f"Can't find flow with path {flow_path.as_posix()}.")
# region SchemaValidatableMixin
@classmethod
def _create_schema_for_validation(cls, context) -> Schema:
# import here to avoid circular import
from ..schemas._flow import FlowSchema
return FlowSchema(context=context)
def _default_context(self) -> dict:
return {BASE_PATH_CONTEXT_KEY: self._flow_dir}
def _create_validation_error(self, message, no_personal_data_message=None):
return UserErrorException(
message=message,
target=ErrorTarget.CONTROL_PLANE_SDK,
no_personal_data_message=no_personal_data_message,
)
def _dump_for_validation(self) -> Dict:
# Flow is read-only in control plane, so we always dump the flow from file
data = load_yaml(self.flow_dag_path)
if isinstance(self._params_override, dict):
data.update(self._params_override)
return data
# endregion
# region MLFlow model requirements
@property
def inputs(self):
# This is used for build mlflow model signature.
if not self._executable:
self._executable = self._init_executable()
return {k: v.type.value for k, v in self._executable.inputs.items()}
@property
def outputs(self):
# This is used for build mlflow model signature.
if not self._executable:
self._executable = self._init_executable()
return {k: v.type.value for k, v in self._executable.outputs.items()}
# endregion
def __call__(self, *args, **kwargs):
"""Calling flow as a function, the inputs should be provided with key word arguments.
Returns the output of the flow.
The function call throws UserErrorException: if the flow is not valid or the inputs are not valid.
SystemErrorException: if the flow execution failed due to unexpected executor error.
:param args: positional arguments are not supported.
:param kwargs: flow inputs with key word arguments.
:return:
"""
if args:
raise UserErrorException("Flow can only be called with keyword arguments.")
result = self.invoke(inputs=kwargs)
return result.output
def invoke(self, inputs: dict) -> "LineResult":
"""Invoke a flow and get a LineResult object."""
from promptflow._sdk._submitter import TestSubmitter
from promptflow._sdk.operations._flow_context_resolver import FlowContextResolver
if self.language == FlowLanguage.CSharp:
with TestSubmitter(flow=self, flow_context=self.context).init(
stream_output=self.context.streaming
) as submitter:
result = submitter.flow_test(inputs=inputs, allow_generator_output=self.context.streaming)
return result
else:
invoker = FlowContextResolver.resolve(flow=self)
result = invoker._invoke(
data=inputs,
)
return result
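# Calling a flow as a function (flow path and input name are placeholders):
#   f = Flow.load("path/to/flow")
#   answer = f(question="What is Prompt Flow?")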
| promptflow/src/promptflow/promptflow/_sdk/entities/_flow.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_sdk/entities/_flow.py",
"repo_id": "promptflow",
"token_count": 5239
} | 15 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import importlib.util
import inspect
import io
import json
import logging
import pkgutil
from dataclasses import asdict
from os import PathLike
from pathlib import Path
from types import ModuleType
from typing import Union
import jsonschema
from promptflow._core.tool_meta_generator import (
ToolValidationError,
_parse_tool_from_function,
asdict_without_none,
is_tool,
)
from promptflow._core.tools_manager import PACKAGE_TOOLS_ENTRY, collect_package_tools
from promptflow._sdk._constants import ICON, ICON_DARK, ICON_LIGHT, LOGGER_NAME, SKIP_FUNC_PARAMS, TOOL_SCHEMA
from promptflow._sdk._telemetry import ActivityType, monitor_operation
from promptflow._sdk.entities._validation import ValidationResult, ValidationResultBuilder
from promptflow._utils.multimedia_utils import convert_multimedia_data_to_base64
from promptflow.contracts.multimedia import Image
from promptflow.exceptions import UserErrorException
TOTAL_COUNT = "total_count"
INVALID_COUNT = "invalid_count"
logger = logging.getLogger(LOGGER_NAME)
class ToolOperations:
"""ToolOperations."""
def __init__(self):
self._tool_schema_dict = None
@property
def _tool_schema(self):
if not self._tool_schema_dict:
with open(TOOL_SCHEMA, "r") as f:
self._tool_schema_dict = json.load(f)
return self._tool_schema_dict
def _merge_validate_result(self, target, source):
target.merge_with(source)
target._set_extra_info(
TOTAL_COUNT,
target._get_extra_info(TOTAL_COUNT, 0) + source._get_extra_info(TOTAL_COUNT, 0),
)
target._set_extra_info(
INVALID_COUNT,
target._get_extra_info(INVALID_COUNT, 0) + source._get_extra_info(INVALID_COUNT, 0),
)
def _list_tools_in_package(self, package_name: str, raise_error: bool = False):
"""
List the meta of all tools in the package. Raise user error if raise_error=True and found incorrect tools.
:param package_name: Package name
:type package_name: str
:param raise_error: Whether to raise the error.
:type raise_error: bool
:return: Dict of tools meta
:rtype: Dict[str, Dict]
"""
package_tools, validate_result = self._list_tool_meta_in_package(package_name=package_name)
if not validate_result.passed:
if raise_error:
def tool_validate_error_func(msg, _):
return ToolValidationError(message=msg, validate_result=validate_result)
validate_result.try_raise(raise_error=raise_error, error_func=tool_validate_error_func)
else:
logger.warning(f"Found invalid tool(s):\n {repr(validate_result)}")
return package_tools
def _list_tool_meta_in_package(self, package_name: str):
"""
List the meta of all tools in the package.
:param package_name: Package name
:type package_name: str
:return: Dict of tools meta, validation result
:rtype: Dict[str, Dict], ValidationResult
"""
package_tools = {}
validate_result = ValidationResultBuilder.success()
try:
package = __import__(package_name)
module_list = pkgutil.walk_packages(package.__path__, prefix=package.__name__ + ".")
for module in module_list:
module_tools, module_validate_result = self._generate_tool_meta(importlib.import_module(module.name))
package_tools.update(module_tools)
self._merge_validate_result(validate_result, module_validate_result)
except ImportError as e:
raise UserErrorException(f"Cannot find the package {package_name}, {e}.")
return package_tools, validate_result
def _generate_tool_meta(self, tool_module):
"""
Generate tools meta in the module.
:param tool_module: The module needs to generate tools meta
:type tool_module: object
:return: Dict of tools meta, validation result
:rtype: Dict[str, Dict], ValidationResult
"""
tool_functions = self._collect_tool_functions_in_module(tool_module)
tool_methods = self._collect_tool_class_methods_in_module(tool_module)
construct_tools = {}
invalid_tool_count = 0
tool_validate_result = ValidationResultBuilder.success()
for f in tool_functions:
tool, input_settings, extra_info = self._parse_tool_from_func(f)
construct_tool, validate_result = self._serialize_tool(tool, input_settings, extra_info, f)
if validate_result.passed:
tool_name = self._get_tool_name(tool)
construct_tools[tool_name] = construct_tool
else:
invalid_tool_count = invalid_tool_count + 1
tool_validate_result.merge_with(validate_result)
for f, initialize_inputs in tool_methods:
tool, input_settings, extra_info = self._parse_tool_from_func(f, initialize_inputs)
construct_tool, validate_result = self._serialize_tool(tool, input_settings, extra_info, f)
if validate_result.passed:
tool_name = self._get_tool_name(tool)
construct_tools[tool_name] = construct_tool
else:
invalid_tool_count = invalid_tool_count + 1
tool_validate_result.merge_with(validate_result)
# The generated dict cannot be dumped as yaml directly since yaml cannot handle string enum.
tools = json.loads(json.dumps(construct_tools))
tool_validate_result._set_extra_info(TOTAL_COUNT, len(tool_functions) + len(tool_methods))
tool_validate_result._set_extra_info(INVALID_COUNT, invalid_tool_count)
return tools, tool_validate_result
@staticmethod
def _collect_tool_functions_in_module(tool_module):
tools = []
for _, obj in inspect.getmembers(tool_module):
if is_tool(obj):
                # Note that the tool must be defined in this module, not merely
                # imported into it, so its __module__ must match the module under inspection.
if getattr(obj, "__module__", "") != tool_module.__name__:
continue
tools.append(obj)
return tools
@staticmethod
def _collect_tool_class_methods_in_module(tool_module):
from promptflow._core.tool import ToolProvider
tools = []
for _, obj in inspect.getmembers(tool_module):
if isinstance(obj, type) and issubclass(obj, ToolProvider) and obj.__module__ == tool_module.__name__:
for _, method in inspect.getmembers(obj):
if is_tool(method):
initialize_inputs = obj.get_initialize_inputs()
tools.append((method, initialize_inputs))
return tools
def _get_tool_name(self, tool):
tool_name = (
f"{tool.module}.{tool.class_name}.{tool.function}"
if tool.class_name is not None
else f"{tool.module}.{tool.function}"
)
return tool_name
def _parse_tool_from_func(self, tool_func, initialize_inputs=None):
"""
Parse tool from tool function
:param tool_func: The tool function
:type tool_func: callable
:param initialize_inputs: Initialize inputs of tool
:type initialize_inputs: Dict[str, obj]
:return: tool object, tool input settings, extra info about the tool
:rtype: Tool, Dict[str, InputSetting], Dict[str, obj]
"""
tool = _parse_tool_from_function(
tool_func, initialize_inputs=initialize_inputs, gen_custom_type_conn=True, skip_prompt_template=True
)
extra_info = getattr(tool_func, "__extra_info")
input_settings = getattr(tool_func, "__input_settings")
return tool, input_settings, extra_info
def _validate_tool_function(self, tool, input_settings, extra_info, func_name=None, func_path=None):
"""
Check whether the icon and input settings of the tool are legitimate.
:param tool: The tool object
:type tool: Tool
:param input_settings: Input settings of the tool
:type input_settings: Dict[str, InputSetting]
:param extra_info: Extra info about the tool
:type extra_info: Dict[str, obj]
:param func_name: Function name of the tool
:type func_name: str
:param func_path: Script path of the tool
:type func_path: str
:return: Validation result of the tool
:rtype: ValidationResult
"""
validate_result = ValidationResultBuilder.success()
if extra_info:
if ICON in extra_info:
if ICON_LIGHT in extra_info or ICON_DARK in extra_info:
validate_result.append_error(
yaml_path=None,
message=f"Cannot provide both `icon` and `{ICON_LIGHT}` or `{ICON_DARK}`.",
function_name=func_name,
location=func_path,
key="function_name",
)
if input_settings:
input_settings_validate_result = self._validate_input_settings(
tool.inputs, input_settings, func_name, func_path
)
validate_result.merge_with(input_settings_validate_result)
return validate_result
def _validate_tool_schema(self, tool_dict, func_name=None, func_path=None):
"""
Check whether the generated schema of the tool are legitimate.
:param tool_dict: The generated tool dict
:type tool_dict: Dict[str, obj]
:param func_name: Function name of the tool
:type func_name: str
:param func_path: Script path of the tool
:type func_path: str
:return: Validation result of the tool
:rtype: ValidationResult
"""
validate_result = ValidationResultBuilder.success()
try:
jsonschema.validate(instance=tool_dict, schema=self._tool_schema)
except jsonschema.exceptions.ValidationError as e:
validate_result.append_error(
message=str(e), yaml_path=None, function_name=func_name, location=func_path, key="function_name"
)
return validate_result
def _validate_input_settings(self, tool_inputs, input_settings, func_name=None, func_path=None):
"""
Check whether input settings of the tool are legitimate.
:param tool_inputs: Tool inputs
:type tool_inputs: Dict[str, obj]
:param input_settings: Input settings of the tool
:type input_settings: Dict[str, InputSetting]
:param func_name: Function name of the tool
:type func_name: str
:param func_path: Script path of the tool
:type func_path: str
:return: Validation result of the tool
:rtype: ValidationResult
"""
validate_result = ValidationResultBuilder.success()
for input_name, settings in input_settings.items():
if input_name not in tool_inputs:
validate_result.append_error(
yaml_path=None,
message=f"Cannot find {input_name} in tool inputs.",
function_name=func_name,
location=func_path,
key="function_name",
)
if settings.enabled_by and settings.enabled_by not in tool_inputs:
validate_result.append_error(
yaml_path=None,
message=f'Cannot find the input "{settings.enabled_by}" for the enabled_by of {input_name}.',
function_name=func_name,
location=func_path,
key="function_name",
)
if settings.dynamic_list:
dynamic_func_inputs = inspect.signature(settings.dynamic_list._func_obj).parameters
has_kwargs = any([param.kind == param.VAR_KEYWORD for param in dynamic_func_inputs.values()])
required_inputs = [
k
for k, v in dynamic_func_inputs.items()
if v.default is inspect.Parameter.empty and v.kind != v.VAR_KEYWORD and k not in SKIP_FUNC_PARAMS
]
if settings.dynamic_list._input_mapping:
# Validate input mapping in dynamic_list
for func_input, reference_input in settings.dynamic_list._input_mapping.items():
# Check invalid input name of dynamic list function
if not has_kwargs and func_input not in dynamic_func_inputs:
validate_result.append_error(
yaml_path=None,
message=f"Cannot find {func_input} in the inputs of "
f"dynamic_list func {settings.dynamic_list.func_path}",
function_name=func_name,
location=func_path,
key="function_name",
)
# Check invalid input name of tool
if reference_input not in tool_inputs:
validate_result.append_error(
yaml_path=None,
message=f"Cannot find {reference_input} in the tool inputs.",
function_name=func_name,
location=func_path,
key="function_name",
)
if func_input in required_inputs:
required_inputs.remove(func_input)
# Check required input of dynamic_list function
if len(required_inputs) != 0:
validate_result.append_error(
yaml_path=None,
message=f"Missing required input(s) of dynamic_list function: {required_inputs}",
function_name=func_name,
location=func_path,
key="function_name",
)
return validate_result
def _serialize_tool(self, tool, input_settings, extra_info, tool_func):
"""
Serialize tool obj to dict.
        :param tool: The tool object
        :type tool: Tool
        :param input_settings: Input settings of the tool
        :type input_settings: Dict[str, InputSetting]
        :param extra_info: Extra info about the tool
        :type extra_info: Dict[str, obj]
        :param tool_func: The tool function
        :type tool_func: callable
        :return: serialized tool dict, validation result
        :rtype: Dict[str, obj], ValidationResult
"""
tool_func_name = tool_func.__name__
tool_script_path = inspect.getsourcefile(getattr(tool_func, "__original_function", tool_func))
validate_result = self._validate_tool_function(
tool, input_settings, extra_info, tool_func_name, tool_script_path
)
if validate_result.passed:
construct_tool = asdict(tool, dict_factory=lambda x: {k: v for (k, v) in x if v})
if extra_info:
if ICON in extra_info:
extra_info[ICON] = self._serialize_icon_data(extra_info["icon"])
if ICON_LIGHT in extra_info:
icon = extra_info.get("icon", {})
icon["light"] = self._serialize_icon_data(extra_info[ICON_LIGHT])
extra_info[ICON] = icon
if ICON_DARK in extra_info:
icon = extra_info.get("icon", {})
icon["dark"] = self._serialize_icon_data(extra_info[ICON_DARK])
extra_info[ICON] = icon
construct_tool.update(extra_info)
# Update tool input settings
if input_settings:
tool_inputs = construct_tool.get("inputs", {})
generated_by_inputs = {}
for input_name, settings in input_settings.items():
tool_inputs[input_name].update(asdict_without_none(settings))
kwargs = settings._kwargs or {}
for k, v in kwargs.items():
if k in tool_inputs[input_name]:
if isinstance(v, dict):
tool_inputs[input_name][k].update(v)
elif isinstance(v, list):
tool_inputs[input_name][k].append(v)
else:
                                logger.debug(
                                    f"InputSetting {k} of {input_name} will be overwritten from"
                                    f" {tool_inputs[input_name][k]} to {v}."
                                )
tool_inputs[input_name][k] = v
else:
tool_inputs[input_name][k] = v
if settings.generated_by:
generated_by_inputs.update(settings.generated_by._input_settings)
tool_inputs.update(generated_by_inputs)
schema_validate_result = self._validate_tool_schema(construct_tool, tool_func_name, tool_script_path)
validate_result.merge_with(schema_validate_result)
return construct_tool, validate_result
else:
return {}, validate_result
def _serialize_icon_data(self, icon):
if not Path(icon).exists():
raise UserErrorException(f"Cannot find the icon path {icon}.")
return self._serialize_image_data(icon)
@staticmethod
def _serialize_image_data(image_path):
"""Serialize image to base64."""
from PIL import Image as PIL_Image
with open(image_path, "rb") as image_file:
# Create a BytesIO object from the image file
image_data = io.BytesIO(image_file.read())
# Open the image and resize it
img = PIL_Image.open(image_data)
if img.size != (16, 16):
img = img.resize((16, 16), PIL_Image.Resampling.LANCZOS)
buffered = io.BytesIO()
img.save(buffered, format="PNG")
icon_image = Image(buffered.getvalue(), mime_type="image/png")
image_url = convert_multimedia_data_to_base64(icon_image, with_type=True)
return image_url
@staticmethod
def _is_package_tool(package) -> bool:
import pkg_resources
try:
distribution = pkg_resources.get_distribution(package.__name__)
entry_points = distribution.get_entry_map()
return PACKAGE_TOOLS_ENTRY in entry_points
except Exception as e:
logger.debug(f"Failed to check {package.__name__} is a package tool, raise {e}")
return False
@monitor_operation(activity_name="pf.tools.list", activity_type=ActivityType.PUBLICAPI)
def list(
self,
flow: Union[str, PathLike] = None,
):
"""
List all package tools in the environment and code tools in the flow.
:param flow: path to the flow directory
:type flow: Union[str, PathLike]
:return: Dict of package tools and code tools info.
:rtype: Dict[str, Dict]
"""
from promptflow._sdk._pf_client import PFClient
local_client = PFClient()
package_tools = collect_package_tools()
if flow:
tools, _ = local_client.flows._generate_tools_meta(flow)
else:
tools = {"package": {}, "code": {}}
tools["package"].update(package_tools)
return tools
@monitor_operation(activity_name="pf.tools.validate", activity_type=ActivityType.PUBLICAPI)
def validate(
self, source: Union[str, callable, PathLike], *, raise_error: bool = False, **kwargs
) -> ValidationResult:
"""
Validate tool.
:param source: path to the package tool directory or tool script
:type source: Union[str, callable, PathLike]
:param raise_error: whether raise error when validation failed
:type raise_error: bool
:return: a validation result object
:rtype: ValidationResult
"""
def validate_tool_function(tool_func, init_inputs=None):
tool, input_settings, extra_info = self._parse_tool_from_func(tool_func, init_inputs)
_, validate_result = self._serialize_tool(tool, input_settings, extra_info, source)
validate_result._set_extra_info(TOTAL_COUNT, 1)
validate_result._set_extra_info(INVALID_COUNT, 0 if validate_result.passed else 1)
return validate_result
if callable(source):
from promptflow._core.tool import ToolProvider
if isinstance(source, type) and issubclass(source, ToolProvider):
# Validate tool class
validate_result = ValidationResultBuilder.success()
for _, method in inspect.getmembers(source):
if is_tool(method):
initialize_inputs = source.get_initialize_inputs()
func_validate_result = validate_tool_function(method, initialize_inputs)
self._merge_validate_result(validate_result, func_validate_result)
else:
# Validate tool function
validate_result = validate_tool_function(source)
elif isinstance(source, (str, PathLike)):
# Validate tool script
if not Path(source).exists():
raise UserErrorException(f"Cannot find the tool script {source}")
# Load the module from the file path
module_name = Path(source).stem
spec = importlib.util.spec_from_file_location(module_name, source)
module = importlib.util.module_from_spec(spec)
# Load the module's code
spec.loader.exec_module(module)
_, validate_result = self._generate_tool_meta(module)
elif isinstance(source, ModuleType):
# Validate package tool
if not self._is_package_tool(source):
raise UserErrorException("Invalid package tool.")
_, validate_result = self._list_tool_meta_in_package(package_name=source.__name__)
else:
            raise UserErrorException(
                "Invalid source provided; tool validation supports a tool function, "
                "a package tool module, or a tool script path."
            )
def tool_validate_error_func(msg, _):
return ToolValidationError(message=msg)
validate_result.try_raise(raise_error=raise_error, error_func=tool_validate_error_func)
return validate_result
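# Illustrative usage (the script path is a placeholder):
#   ops = ToolOperations()
#   result = ops.validate("my_tool_script.py")
#   print(result.passed)
#   all_tools = ops.list()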
| promptflow/src/promptflow/promptflow/_sdk/operations/_tool_operations.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_sdk/operations/_tool_operations.py",
"repo_id": "promptflow",
"token_count": 10752
} | 16 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import re
class CredentialScrubber:
"""Scrub sensitive information in string."""
PLACE_HOLDER = "**data_scrubbed**"
LENGTH_THRESHOLD = 2
def __init__(self):
self.default_regex_set = set(
[
r"(?<=sig=)[^\s;&]+", # Replace signature.
r"(?<=key=)[^\s;&]+", # Replace key.
]
)
self.default_str_set = set()
self.custom_regex_set = set()
self.custom_str_set = set()
def scrub(self, input: str):
"""Replace sensitive information in input string with PLACE_HOLDER.
For example, for input string: "print accountkey=accountKey", the output will be:
"print accountkey=**data_scrubbed**"
"""
output = input
regex_set = self.default_regex_set.union(self.custom_regex_set)
for regex in regex_set:
output = re.sub(regex, self.PLACE_HOLDER, output, flags=re.IGNORECASE)
str_set = self.default_str_set.union(self.custom_str_set)
for s in str_set:
output = output.replace(s, self.PLACE_HOLDER)
return output
def add_regex(self, pattern: str):
# policy: http://policheck.azurewebsites.net/Pages/TermInfo.aspx?LCID=9&TermID=79458
"""Add regex pattern to checklist."""
self.custom_regex_set.add(pattern)
def add_str(self, s: str):
"""Add string to checklist.
Only scrub string with length > LENGTH_THRESHOLD.
"""
if s is None:
return
if len(s) <= self.LENGTH_THRESHOLD:
return
self.custom_str_set.add(s)
def clear(self):
"""Clear custom regex and string set."""
self.custom_regex_set = set()
self.custom_str_set = set()
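# Illustrative usage:
#   scrubber = CredentialScrubber()
#   scrubber.add_str("my-secret-token")
#   scrubber.scrub("url?sig=abc123&key=xyz")
#   # -> "url?sig=**data_scrubbed**&key=**data_scrubbed**"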
| promptflow/src/promptflow/promptflow/_utils/credential_scrubber.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_utils/credential_scrubber.py",
"repo_id": "promptflow",
"token_count": 856
} | 17 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
"""This is a common util file.
!!!Please do not include any project related import.!!!
"""
import contextlib
import contextvars
import functools
import importlib
import json
import logging
import os
import re
import time
import traceback
from datetime import datetime
from pathlib import Path
from typing import Any, Dict, Iterable, Iterator, List, Optional, TypeVar, Union
from promptflow._constants import DEFAULT_ENCODING
T = TypeVar("T")
class AttrDict(dict):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def __getattr__(self, item):
if item in self:
return self.__getitem__(item)
return super().__getattribute__(item)
def camel_to_snake(text: str) -> Optional[str]:
text = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", text)
return re.sub("([a-z0-9])([A-Z])", r"\1_\2", text).lower()
class DateTimeEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, datetime):
return o.isoformat()
return json.JSONEncoder.default(self, o)
def is_json_serializable(value: Any) -> bool:
try:
json.dumps(value)
return True
except TypeError:
return False
def load_json(file_path: Union[str, Path]) -> dict:
if os.path.getsize(file_path) > 0:
        with open(file_path, "r", encoding=DEFAULT_ENCODING) as f:
return json.load(f)
return {}
def dump_list_to_jsonl(file_path: Union[str, Path], list_data: List[Dict]):
with open(file_path, "w", encoding=DEFAULT_ENCODING) as jsonl_file:
for data in list_data:
json.dump(data, jsonl_file, ensure_ascii=False)
jsonl_file.write("\n")
def transpose(values: List[Dict[str, Any]], keys: Optional[List] = None) -> Dict[str, List]:
keys = keys or list(values[0].keys())
return {key: [v.get(key) for v in values] for key in keys}
def reverse_transpose(values: Dict[str, List]) -> List[Dict[str, Any]]:
# Setup a result list same len with values
value_lists = list(values.values())
_len = len(value_lists[0])
if any(len(value_list) != _len for value_list in value_lists):
raise Exception(f"Value list of each key must have same length, please check {values!r}.")
result = []
for i in range(_len):
result.append({})
for key, vals in values.items():
for _idx, val in enumerate(vals):
result[_idx][key] = val
return result
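# Illustrative round trip (not part of the original module):
#
#     rows = [{"a": 1, "b": 2}, {"a": 3, "b": 4}]
#     cols = transpose(rows)           # {"a": [1, 3], "b": [2, 4]}
#     assert reverse_transpose(cols) == rows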
def deprecated(f=None, replace=None, version=None):
if f is None:
return functools.partial(deprecated, replace=replace, version=version)
msg = [f"Function {f.__qualname__!r} is deprecated."]
if version:
msg.append(f"Deprecated since version {version}.")
if replace:
msg.append(f"Use {replace!r} instead.")
msg = " ".join(msg)
@functools.wraps(f)
def wrapper(*args, **kwargs):
logging.warning(msg)
return f(*args, **kwargs)
return wrapper
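# Illustrative usage (not part of the original module; the function names are
# hypothetical):
#
#     @deprecated(replace="new_func", version="1.2.0")
#     def old_func():
#         ...
#
#     old_func()  # logs a warning: "Function 'old_func' is deprecated.
#                 # Deprecated since version 1.2.0. Use 'new_func' instead."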
def try_import(module, error_message, raise_error=True):
try:
importlib.import_module(module)
except ImportError as e:
ex_message = f"{error_message} Root cause: {e!r}"
logging.warning(ex_message)
if raise_error:
raise Exception(ex_message)
def is_in_ci_pipeline():
if os.environ.get("IS_IN_CI_PIPELINE") == "true":
return True
return False
def count_and_log_progress(
inputs: Iterable[T], logger: logging.Logger, total_count: int, formatter="{count} / {total_count} finished."
) -> Iterator[T]:
log_interval = max(int(total_count / 10), 1)
count = 0
for item in inputs:
count += 1
if count % log_interval == 0 or count == total_count:
logger.info(formatter.format(count=count, total_count=total_count))
yield item
def log_progress(
run_start_time: datetime,
logger: logging.Logger,
count: int,
total_count: int,
formatter="Finished {count} / {total_count} lines.",
*,
last_log_count: Optional[int] = None,
):
# Calculate log_interval to determine when to log progress.
# If total_count is less than 100, log every 10% of total_count; otherwise, log every 10 lines.
log_interval = min(10, max(int(total_count / 10), 1))
# If last_log_count is not None, determine whether to log based on whether the difference
# between the current count and the previous count exceeds log_interval.
# Otherwise, decide based on whether the current count is evenly divisible by log_interval.
if last_log_count:
log_flag = (count - last_log_count) >= log_interval
else:
log_flag = count % log_interval == 0
if count > 0 and (log_flag or count == total_count):
average_execution_time = round((datetime.utcnow().timestamp() - run_start_time.timestamp()) / count, 2)
estimated_execution_time = round(average_execution_time * (total_count - count), 2)
logger.info(formatter.format(count=count, total_count=total_count))
logger.info(
f"Average execution time for completed lines: {average_execution_time} seconds. "
f"Estimated time for incomplete lines: {estimated_execution_time} seconds."
)
def extract_user_frame_summaries(frame_summaries: List[traceback.FrameSummary]):
from promptflow import _core
core_folder = os.path.dirname(_core.__file__)
for i in range(len(frame_summaries) - 1):
cur_file = frame_summaries[i].filename
next_file = frame_summaries[i + 1].filename
# If the current frame is in _core folder and the next frame is not in _core folder
# then we can say that the next frame is in user code.
if cur_file.startswith(core_folder) and not next_file.startswith(core_folder):
return frame_summaries[i + 1 :]
return frame_summaries
def format_user_stacktrace(frame):
# TODO: Maybe we can filter all frames from our code base to make it clean?
frame_summaries = traceback.extract_stack(frame)
user_frame_summaries = extract_user_frame_summaries(frame_summaries)
return traceback.format_list(user_frame_summaries)
def generate_elapsed_time_messages(func_name: str, start_time: float, interval: int, thread_id: int):
import sys
frames = sys._current_frames()
if thread_id not in frames:
thread_msg = (
f"thread {thread_id} cannot be found in sys._current_frames, "
+ "maybe it has been terminated due to unexpected errors."
)
else:
frame = frames[thread_id]
stack_msgs = format_user_stacktrace(frame)
stack_msg = "".join(stack_msgs)
thread_msg = f"stacktrace of thread {thread_id}:\n{stack_msg}"
elapse_time = time.perf_counter() - start_time
# Make elapse time a multiple of interval.
elapse_time = round(elapse_time / interval) * interval
msgs = [f"{func_name} has been running for {elapse_time:.0f} seconds, {thread_msg}"]
return msgs
def set_context(context: contextvars.Context):
for var, value in context.items():
var.set(value)
def convert_inputs_mapping_to_param(inputs_mapping: dict):
"""Use this function to convert inputs_mapping to a string that can be passed to component as a string parameter,
we have to do this since we can't pass a dict as a parameter to component.
# TODO: Finalize the format of inputs_mapping
"""
return ",".join([f"{k}={v}" for k, v in inputs_mapping.items()])
@contextlib.contextmanager
def environment_variable_overwrite(key, val):
if key in os.environ.keys():
backup_value = os.environ[key]
else:
backup_value = None
os.environ[key] = val
try:
yield
finally:
        # Treat an empty string as a valid original value to restore.
        if backup_value is not None:
os.environ[key] = backup_value
else:
os.environ.pop(key)
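# Illustrative usage (not part of the original module; MY_FLAG is a made-up
# variable name):
#
#     with environment_variable_overwrite("MY_FLAG", "1"):
#         assert os.environ["MY_FLAG"] == "1"
#     # On exit, the previous value of MY_FLAG (or its absence) is restored.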
def resolve_dir_to_absolute(base_dir: Union[str, Path], sub_dir: Union[str, Path]) -> Path:
"""Resolve directory to absolute path with base_dir as root"""
path = sub_dir if isinstance(sub_dir, Path) else Path(sub_dir)
if not path.is_absolute():
base_dir = base_dir if isinstance(base_dir, Path) else Path(base_dir)
path = base_dir / sub_dir
return path
def parse_ua_to_dict(ua):
"""Parse string user agent to dict with name as ua name and value as ua version."""
ua_dict = {}
ua_list = ua.split(" ")
for item in ua_list:
if item:
key, value = item.split("/")
ua_dict[key] = value
return ua_dict
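# Example (illustrative user agent string):
#     parse_ua_to_dict("promptflow-sdk/1.0.0 azure-ai-ml/1.12.0")
#     # -> {"promptflow-sdk": "1.0.0", "azure-ai-ml": "1.12.0"}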
# TODO: Add "conditions" parameter to pass in a list of lambda functions
# to check if the environment variable is valid.
def get_int_env_var(env_var_name, default_value=None):
"""
The function `get_int_env_var` retrieves an integer environment variable value, with an optional
default value if the variable is not set or cannot be converted to an integer.
:param env_var_name: The name of the environment variable you want to retrieve the value of
:param default_value: The default value is the value that will be returned if the environment
variable is not found or if it cannot be converted to an integer
:return: an integer value.
"""
try:
return int(os.environ.get(env_var_name, default_value))
except Exception:
return default_value
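# Example (illustrative; PF_WORKER_COUNT is used only as a sample name): with
# the variable unset or set to a non-integer value, get_int_env_var("PF_WORKER_COUNT", 4)
# returns 4; with PF_WORKER_COUNT=16 it returns 16.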
def prompt_y_n(msg, default=None):
if default not in [None, "y", "n"]:
raise ValueError("Valid values for default are 'y', 'n' or None")
y = "Y" if default == "y" else "y"
n = "N" if default == "n" else "n"
while True:
ans = prompt_input("{} ({}/{}): ".format(msg, y, n))
if ans.lower() == n.lower():
return False
if ans.lower() == y.lower():
return True
if default and not ans:
return default == y.lower()
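# Example (illustrative): prompt_y_n("Continue?", default="y") prints
# "===> Continue? (Y/n): " and returns True for "y", False for "n", and True
# for an empty answer (falling back to the default).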
def prompt_input(msg):
return input("\n===> " + msg)
def _normalize_identifier_name(name):
normalized_name = name.lower()
normalized_name = re.sub(r"[\W_]", " ", normalized_name) # No non-word characters
normalized_name = re.sub(" +", " ", normalized_name).strip() # No double spaces, leading or trailing spaces
if re.match(r"\d", normalized_name):
normalized_name = "n" + normalized_name # No leading digits
return normalized_name
def _sanitize_python_variable_name(name: str):
return _normalize_identifier_name(name).replace(" ", "_")
| promptflow/src/promptflow/promptflow/_utils/utils.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_utils/utils.py",
"repo_id": "promptflow",
"token_count": 4054
} | 18 |
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.9.2, generator: @autorest/[email protected])
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from ._azure_machine_learning_designer_service_client import AzureMachineLearningDesignerServiceClient
__all__ = ['AzureMachineLearningDesignerServiceClient']
# `._patch.py` is used for handwritten extensions to the generated code
# Example: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md
from ._patch import patch_sdk
patch_sdk()
| promptflow/src/promptflow/promptflow/azure/_restclient/flow/__init__.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/azure/_restclient/flow/__init__.py",
"repo_id": "promptflow",
"token_count": 192
} | 19 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected])
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._flow_sessions_admin_operations import build_create_flow_session_request
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class FlowSessionsAdminOperations:
"""FlowSessionsAdminOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~flow.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace_async
async def create_flow_session(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
session_id: str,
waitfor_completion: Optional[bool] = False,
body: Optional["_models.CreateFlowSessionRequest"] = None,
**kwargs: Any
) -> str:
"""create_flow_session.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param session_id:
:type session_id: str
:param waitfor_completion:
:type waitfor_completion: bool
:param body:
:type body: ~flow.models.CreateFlowSessionRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:return: str, or the result of cls(response)
:rtype: str
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[str]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
if body is not None:
_json = self._serialize.body(body, "CreateFlowSessionRequest")
else:
_json = None
request = build_create_flow_session_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
session_id=session_id,
content_type=content_type,
json=_json,
waitfor_completion=waitfor_completion,
template_url=self.create_flow_session.metadata["url"],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize("str", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_flow_session.metadata = {"url": "/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/FlowSessionsAdmin/{sessionId}"} # type: ignore
| promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio/operations/_flow_sessions_admin_operations.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio/operations/_flow_sessions_admin_operations.py",
"repo_id": "promptflow",
"token_count": 1822
} | 20 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
from ._flow_operations import FlowOperations
from ._run_operations import RunOperations
__all__ = ["FlowOperations", "RunOperations"]
| promptflow/src/promptflow/promptflow/azure/operations/__init__.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/azure/operations/__init__.py",
"repo_id": "promptflow",
"token_count": 93
} | 21 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from promptflow.exceptions import ErrorTarget, SystemErrorException, UserErrorException, ValidationException
class InputMappingError(ValidationException):
def __init__(self, target: ErrorTarget = ErrorTarget.EXECUTOR, **kwargs):
super().__init__(target=target, **kwargs)
class EmptyInputsData(UserErrorException):
pass
class ExecutorServiceUnhealthy(SystemErrorException):
pass
class BatchRunTimeoutError(UserErrorException):
pass
| promptflow/src/promptflow/promptflow/batch/_errors.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/batch/_errors.py",
"repo_id": "promptflow",
"token_count": 161
} | 22 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
# flake8: noqa
from .flow_executor import FlowExecutor
from .flow_validator import FlowValidator
| promptflow/src/promptflow/promptflow/executor/__init__.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/executor/__init__.py",
"repo_id": "promptflow",
"token_count": 54
} | 23 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from fastapi import APIRouter, Request
from promptflow._core.operation_context import OperationContext
from promptflow.executor._service.contracts.execution_request import FlowExecutionRequest, NodeExecutionRequest
from promptflow.executor._service.utils.service_utils import get_log_context, set_environment_variables
from promptflow.executor.flow_executor import FlowExecutor, execute_flow
from promptflow.storage._run_storage import DefaultRunStorage
router = APIRouter()
@router.post("/execution/flow")
async def flow_execution(request: Request, flow_request: FlowExecutionRequest):
OperationContext.get_instance().update(dict(request.headers))
# validate request
flow_request.validate_request()
# resolve environment variables
set_environment_variables(flow_request)
# execute flow
storage = DefaultRunStorage(base_dir=flow_request.working_dir, sub_dir=flow_request.output_dir)
with get_log_context(flow_request):
return execute_flow(
flow_request.flow_file,
flow_request.working_dir,
flow_request.output_dir,
flow_request.connections,
flow_request.inputs,
run_id=flow_request.run_id,
storage=storage,
)
@router.post("/execution/node")
async def node_execution(request: Request, node_request: NodeExecutionRequest):
OperationContext.get_instance().update(dict(request.headers))
# validate request
node_request.validate_request()
# resolve environment variables
set_environment_variables(node_request)
# execute node
with get_log_context(node_request):
storage = DefaultRunStorage(base_dir=node_request.working_dir, sub_dir=node_request.output_dir)
result = FlowExecutor.load_and_exec_node(
node_request.flow_file,
node_request.node_name,
flow_inputs=node_request.flow_inputs,
dependency_nodes_outputs=node_request.dependency_nodes_outputs,
connections=node_request.connections,
working_dir=node_request.working_dir,
storage=storage,
)
return result
| promptflow/src/promptflow/promptflow/executor/_service/apis/execution.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/executor/_service/apis/execution.py",
"repo_id": "promptflow",
"token_count": 819
} | 24 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from functools import partial
from pathlib import Path
from typing import Union
from promptflow._utils.multimedia_utils import _process_recursively, get_file_reference_encoder
from promptflow.contracts.multimedia import Image
from promptflow.contracts.run_info import FlowRunInfo
from promptflow.contracts.run_info import RunInfo as NodeRunInfo
class AbstractRunStorage:
def persist_node_run(self, run_info: NodeRunInfo):
"""Write the node run info to somewhere immediately after the node is executed.
:param run_info: The run info of the node.
:type run_info: ~promptflow.contracts.run_info.RunInfo
"""
raise NotImplementedError("AbstractRunStorage is an abstract class, no implementation for persist_node_run.")
def persist_flow_run(self, run_info: FlowRunInfo):
"""Write the flow run info to somewhere immediately after one line data is executed for the flow.
        :param run_info: The run info of the flow.
        :type run_info: ~promptflow.contracts.run_info.FlowRunInfo
"""
raise NotImplementedError("AbstractRunStorage is an abstract class, no implementation for persist_flow_run.")
class AbstractBatchRunStorage(AbstractRunStorage):
def load_node_run_info_for_line(self, line_number: int):
raise NotImplementedError(
"AbstractBatchRunStorage is an abstract class, no implementation for load_node_run_info_for_line."
)
def load_flow_run_info(self, line_number: int):
raise NotImplementedError(
"AbstractBatchRunStorage is an abstract class, no implementation for load_flow_run_info."
)
class DummyRunStorage(AbstractRunStorage):
def persist_node_run(self, run_info: NodeRunInfo):
"""Dummy implementation for persist_node_run
:param run_info: The run info of the node.
:type run_info: ~promptflow.contracts.run_info.RunInfo
"""
pass
def persist_flow_run(self, run_info: FlowRunInfo):
"""Dummy implementation for persist_flow_run
        :param run_info: The run info of the flow.
        :type run_info: ~promptflow.contracts.run_info.FlowRunInfo
"""
pass
class DefaultRunStorage(AbstractRunStorage):
def __init__(self, base_dir: Path = None, sub_dir: Path = None):
"""Initialize the default run storage.
:param base_dir: The base directory to store the multimedia data.
:type base_dir: Path
:param sub_dir: The sub directory to store the multimedia data.
:type sub_dir: Path
"""
self._base_dir = base_dir
self._sub_dir = sub_dir
def persist_run_info(self, run_info: Union[FlowRunInfo, NodeRunInfo]):
"""Persist the multimedia data in run info after execution.
:param run_info: The run info of the node or flow.
:type run_info: ~promptflow.contracts.run_info.RunInfo or ~promptflow.contracts.run_info.FlowRunInfo
"""
# Persist and convert images in inputs to path dictionaries.
# This replaces any image objects with their corresponding file path dictionaries.
if run_info.inputs:
run_info.inputs = self._persist_and_convert_images_to_path_dicts(run_info.inputs)
# Persist and convert images in output to path dictionaries.
# This replaces any image objects with their corresponding file path dictionaries.
if run_info.output:
serialized_output = self._persist_and_convert_images_to_path_dicts(run_info.output)
run_info.output = serialized_output
run_info.result = serialized_output
# Persist and convert images in api_calls to path dictionaries.
# The `inplace=True` parameter is used here to ensure that the original list structure holding generator outputs
# is maintained. This allows us to keep tracking the list as it dynamically changes when the generator is
# consumed. It is crucial to process the api_calls list in place to avoid losing the reference to the list that
# holds the generator items, which is essential for tracing generator execution.
if run_info.api_calls:
run_info.api_calls = self._persist_and_convert_images_to_path_dicts(run_info.api_calls, inplace=True)
def persist_node_run(self, run_info: NodeRunInfo):
"""Persist the multimedia data in node run info after the node is executed.
This method now delegates to the shared persist_run_info method.
:param run_info: The run info of the node.
:type run_info: NodeRunInfo
"""
self.persist_run_info(run_info)
def persist_flow_run(self, run_info: FlowRunInfo):
"""Persist the multimedia data in flow run info after one line data is executed for the flow.
This method now delegates to the shared persist_run_info method.
:param run_info: The run info of the flow.
:type run_info: FlowRunInfo
"""
self.persist_run_info(run_info)
def _persist_and_convert_images_to_path_dicts(self, value, inplace=False):
"""Persist image objects within a Python object to disk and convert them to path dictionaries.
This function recursively processes a given Python object, which can be a list, a dictionary, or a nested
combination of these, searching for image objects. Each image object encountered is serialized and saved to
disk in a pre-defined location using the `_base_dir` and `_sub_dir` attributes. The image object within the
original data structure is then replaced with a dictionary that indicates the file path of the serialized
image, following the format: `{'data:image/<ext>;path': '.promptflow/intermediate/<image_uuid>.<ext>'}`.
The operation can be performed in-place on the original object or on a new copy, depending on the value of
the `inplace` parameter. When `inplace` is set to `True`, the original object is modified; when set to `False`,
a new object with the converted path dictionaries is returned.
:param value: The Python object to be processed, potentially containing image objects.
:type value: Any
:param inplace: Whether to modify the original object in place (True) or to create a new object with converted
path dictionaries (False).
:type inplace: bool
:return: The original object with converted path dictionaries if `inplace` is True, otherwise a new object with
the conversions.
:rtype: Any
"""
if self._base_dir:
pfbytes_file_reference_encoder = get_file_reference_encoder(
folder_path=self._base_dir,
relative_path=self._sub_dir,
)
else:
pfbytes_file_reference_encoder = None
serialization_funcs = {Image: partial(Image.serialize, **{"encoder": pfbytes_file_reference_encoder})}
return _process_recursively(value, process_funcs=serialization_funcs, inplace=inplace)
| promptflow/src/promptflow/promptflow/storage/_run_storage.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/storage/_run_storage.py",
"repo_id": "promptflow",
"token_count": 2550
} | 25 |
# Prompt flow
[![Python package](https://img.shields.io/pypi/v/promptflow)](https://pypi.org/project/promptflow/)
[![Python](https://img.shields.io/pypi/pyversions/promptflow.svg?maxAge=2592000)](https://pypi.python.org/pypi/promptflow/)
[![PyPI - Downloads](https://img.shields.io/pypi/dm/promptflow)](https://pypi.org/project/promptflow/)
[![CLI](https://img.shields.io/badge/CLI-reference-blue)](https://microsoft.github.io/promptflow/reference/pf-command-reference.html)
[![vsc extension](https://img.shields.io/visual-studio-marketplace/i/prompt-flow.prompt-flow?logo=Visual%20Studio&label=Extension%20)](https://marketplace.visualstudio.com/items?itemName=prompt-flow.prompt-flow)
[![Doc](https://img.shields.io/badge/Doc-online-green)](https://microsoft.github.io/promptflow/index.html)
[![Issue](https://img.shields.io/github/issues/microsoft/promptflow)](https://github.com/microsoft/promptflow/issues/new/choose)
[![Discussions](https://img.shields.io/github/discussions/microsoft/promptflow)](https://github.com/microsoft/promptflow/issues/new/choose)
[![CONTRIBUTING](https://img.shields.io/badge/Contributing-8A2BE2)](https://github.com/microsoft/promptflow/blob/main/CONTRIBUTING.md)
[![License: MIT](https://img.shields.io/github/license/microsoft/promptflow)](https://github.com/microsoft/promptflow/blob/main/LICENSE)
> Welcome to join us to make prompt flow better by
> participating [discussions](https://github.com/microsoft/promptflow/discussions),
> opening [issues](https://github.com/microsoft/promptflow/issues/new/choose),
> submitting [PRs](https://github.com/microsoft/promptflow/pulls).
**Prompt flow** is a suite of development tools designed to streamline the end-to-end development cycle of LLM-based AI applications, from ideation, prototyping, testing, evaluation to production deployment and monitoring. It makes prompt engineering much easier and enables you to build LLM apps with production quality.
With prompt flow, you will be able to:
- **Create and iteratively develop flow**
- Create executable [flows](https://microsoft.github.io/promptflow/concepts/concept-flows.html) that link LLMs, prompts, Python code and other [tools](https://microsoft.github.io/promptflow/concepts/concept-tools.html) together.
- Debug and iterate your flows, especially the [interaction with LLMs](https://microsoft.github.io/promptflow/concepts/concept-connections.html) with ease.
- **Evaluate flow quality and performance**
- Evaluate your flow's quality and performance with larger datasets.
- Integrate the testing and evaluation into your CI/CD system to ensure quality of your flow.
- **Streamlined development cycle for production**
- Deploy your flow to the serving platform you choose or integrate into your app's code base easily.
- (Optional but highly recommended) Collaborate with your team by leveraging the cloud version of [Prompt flow in Azure AI](https://learn.microsoft.com/en-us/azure/machine-learning/prompt-flow/overview-what-is-prompt-flow?view=azureml-api-2).
------
## Installation
To get started quickly, you can use a pre-built development environment. **Click the button below** to open the repo in GitHub Codespaces, and then continue the readme!
[![Open in GitHub Codespaces](https://github.com/codespaces/badge.svg)](https://codespaces.new/microsoft/promptflow?quickstart=1)
If you want to get started in your local environment, first install the packages:
Ensure you have a Python environment; `python=3.9` is recommended.
```sh
pip install promptflow promptflow-tools
```
## Quick Start ⚡
**Create a chatbot with prompt flow**
Run the following command to initiate a prompt flow from a chat template; it creates a folder named `my_chatbot` and generates the required files within it:
```sh
pf flow init --flow ./my_chatbot --type chat
```
**Setup a connection for your API key**
For an OpenAI key, establish a connection by running the following command, which uses the `openai.yaml` file in the `my_chatbot` folder to store your OpenAI key (override the key and connection name with `--set` to avoid changing the yaml file):
```sh
pf connection create --file ./my_chatbot/openai.yaml --set api_key=<your_api_key> --name open_ai_connection
```
For an Azure OpenAI key, establish the connection by running the following command, which uses the `azure_openai.yaml` file:
```sh
pf connection create --file ./my_chatbot/azure_openai.yaml --set api_key=<your_api_key> api_base=<your_api_base> --name open_ai_connection
```
**Chat with your flow**
In the `my_chatbot` folder, there's a `flow.dag.yaml` file that outlines the flow, including inputs/outputs, nodes, connections, and the LLM model.
> Note that in the `chat` node, we're using a connection named `open_ai_connection` (specified in the `connection` field) and the `gpt-35-turbo` model (specified in the `deployment_name` field). The `deployment_name` field specifies the OpenAI model, or the Azure OpenAI deployment resource.
Interact with your chatbot by running: (press `Ctrl + C` to end the session)
```sh
pf flow test --flow ./my_chatbot --interactive
```
**Core value: ensuring "High Quality" from prototype to production**
Explore our [**15-minute tutorial**](examples/tutorials/flow-fine-tuning-evaluation/promptflow-quality-improvement.md) that guides you through prompt tuning ➡ batch testing ➡ evaluation, all designed to ensure high quality ready for production.
Next Step! Continue with the **Tutorial** 👇 section to delve deeper into prompt flow.
## Tutorial 🏃‍♂️
Prompt flow is a tool designed to **build high quality LLM apps**. The development process in prompt flow follows these steps: develop a flow, improve the flow quality, and deploy the flow to production.
### Develop your own LLM apps
#### VS Code Extension
We also offer a VS Code extension (a flow designer) for an interactive flow development experience with UI.
<img src="examples/tutorials/quick-start/media/vsc.png" alt="vsc" width="1000"/>
You can install it from the <a href="https://marketplace.visualstudio.com/items?itemName=prompt-flow.prompt-flow">visualstudio marketplace</a>.
#### Deep delve into flow development
[Getting started with prompt flow](./docs/cloud/azureai/quick-start/index.md): A step-by-step guide to invoking your first flow run.
### Learn from use cases
[Tutorial: Chat with PDF](https://github.com/microsoft/promptflow/blob/main/examples/tutorials/e2e-development/chat-with-pdf.md): An end-to-end tutorial on how to build a high quality chat application with prompt flow, including flow development and evaluation with metrics.
> More examples can be found [here](https://microsoft.github.io/promptflow/tutorials/index.html#samples). We welcome contributions of new use cases!
### Setup for contributors
If you're interested in contributing, please start with our dev setup guide: [dev_setup.md](./docs/dev/dev_setup.md).
Next Step! Continue with the **Contributing** 👇 section to contribute to prompt flow.
## Contributing
This project welcomes contributions and suggestions. Most contributions require you to agree to a
Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us
the rights to use your contribution. For details, visit https://cla.opensource.microsoft.com.
When you submit a pull request, a CLA bot will automatically determine whether you need to provide
a CLA and decorate the PR appropriately (e.g., status check, comment). Simply follow the instructions
provided by the bot. You will only need to do this once across all repos using our CLA.
This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or
contact [[email protected]](mailto:[email protected]) with any additional questions or comments.
## Trademarks
This project may contain trademarks or logos for projects, products, or services. Authorized use of Microsoft
trademarks or logos is subject to and must follow
[Microsoft's Trademark & Brand Guidelines](https://www.microsoft.com/en-us/legal/intellectualproperty/trademarks/usage/general).
Use of Microsoft trademarks or logos in modified versions of this project must not cause confusion or imply Microsoft sponsorship.
Any use of third-party trademarks or logos are subject to those third-party's policies.
## Code of Conduct
This project has adopted the
[Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
For more information see the
[Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)
or contact [[email protected]](mailto:[email protected])
with any additional questions or comments.
## Data Collection
The software may collect information about you and your use of the software and
send it to Microsoft if configured to enable telemetry.
Microsoft may use this information to provide services and improve our products and services.
You may turn on the telemetry as described in the repository.
There are also some features in the software that may enable you and Microsoft
to collect data from users of your applications. If you use these features, you
must comply with applicable law, including providing appropriate notices to
users of your applications together with a copy of Microsoft's privacy
statement. Our privacy statement is located at
https://go.microsoft.com/fwlink/?LinkID=824704. You can learn more about data
collection and use in the help documentation and our privacy statement. Your
use of the software operates as your consent to these practices.
### Telemetry Configuration
Telemetry collection is on by default.
To opt out, please run `pf config set telemetry.enabled=false` to turn it off.
## License
Copyright (c) Microsoft Corporation. All rights reserved.
Licensed under the [MIT](LICENSE) license.
| promptflow/README.md/0 | {
"file_path": "promptflow/README.md",
"repo_id": "promptflow",
"token_count": 2762
} | 0 |
With prompt flow, you can use variants to tune your prompt. In this article, you'll learn the prompt flow variants concept.
# Variants
A variant refers to a specific version of a tool node that has distinct settings. Currently, variants are supported only in the LLM tool. For example, in the LLM tool, a new variant can represent either a different prompt content or different connection settings.
Suppose you want to generate a summary of a news article. You can set different variants of prompts and settings like this:
| Variants | Prompt | Connection settings |
| --------- | ------------------------------------------------------------ | ------------------- |
| Variant 0 | `Summary: {{input_sentences}}` | Temperature = 1 |
| Variant 1 | `Summary: {{input_sentences}}` | Temperature = 0.7 |
| Variant 2 | `What is the main point of this article? {{input_sentences}}` | Temperature = 1 |
| Variant 3 | `What is the main point of this article? {{input_sentences}}` | Temperature = 0.7 |
By utilizing different variants of prompts and settings, you can explore how the model responds to various inputs and outputs, enabling you to discover the most suitable combination for your requirements.
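For illustration, the sketch below shows how two of these variants might be expressed in a `flow.dag.yaml`. The node name `summarize_news` and the prompt file name are hypothetical; see [Tune prompts with variants](../how-to-guides/tune-prompts-with-variants.md) for the authoritative syntax:
```yaml
node_variants:
  summarize_news:
    default_variant_id: variant_0
    variants:
      variant_0:
        node:
          type: llm
          source:
            type: code
            path: summary_prompt.jinja2  # "Summary: {{input_sentences}}"
          inputs:
            temperature: 1
      variant_1:
        node:
          type: llm
          source:
            type: code
            path: summary_prompt.jinja2
          inputs:
            temperature: 0.7
```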
## Benefits of using variants
- **Enhance the quality of your LLM generation**: By creating multiple variants of the same LLM node with diverse prompts and configurations, you can identify the optimal combination that produces high-quality content aligned with your needs.
- **Save time and effort**: Even slight modifications to a prompt can yield significantly different results. It's crucial to track and compare the performance of each prompt version. With variants, you can easily manage the historical versions of your LLM nodes, facilitating updates based on any variant without the risk of forgetting previous iterations. This saves you time and effort in managing prompt tuning history.
- **Boost productivity**: Variants streamline the optimization process for LLM nodes, making it simpler to create and manage multiple variations. You can achieve improved results in less time, thereby increasing your overall productivity.
- **Facilitate easy comparison**: You can effortlessly compare the results obtained from different variants side by side, enabling you to make data-driven decisions regarding the variant that generates the best outcomes.
## Next steps
- [Tune prompts with variants](../how-to-guides/tune-prompts-with-variants.md) | promptflow/docs/concepts/concept-variants.md/0 | {
"file_path": "promptflow/docs/concepts/concept-variants.md",
"repo_id": "promptflow",
"token_count": 642
} | 1 |
# Referencing external files/folders in a flow
Sometimes, pre-existing code assets are essential for the flow reference. In most cases, you can accomplish this by importing a Python package into your flow. However, if a Python package is not available or it is heavy to create a package, you can still reference external files or folders located outside of the current flow folder by using our **additional includes** feature in your flow configuration.
This feature provides an efficient mechanism to list relative file or folder paths that are outside of the flow folder, integrating them seamlessly into your flow.dag.yaml. For example:
```yaml
additional_includes:
- ../web-classification/classify_with_llm.jinja2
- ../web-classification/convert_to_dict.py
- ../web-classification/fetch_text_content_from_url.py
- ../web-classification/prepare_examples.py
- ../web-classification/summarize_text_content.jinja2
- ../web-classification/summarize_text_content__variant_1.jinja2
```
You can add this field `additional_includes` into the flow.dag.yaml. The value of this field is a list of the **relative file/folder path** to the flow folder.
Just as with the common definition of the tool node entry, you can define the tool node entry in the flow.dag.yaml using only the file name, eliminating the need to specify the relative path again. For example:
```yaml
nodes:
- name: fetch_text_content_from_url
type: python
source:
type: code
path: fetch_text_content_from_url.py
inputs:
url: ${inputs.url}
- name: summarize_text_content
use_variants: true
- name: prepare_examples
type: python
source:
type: code
path: prepare_examples.py
inputs: {}
```
The entry file "fetch_text_content_from_url.py" of the tool node "fetch_text_content_from_url" is located in "../web-classification/fetch_text_content_from_url.py", as specified in the additional_includes field. The same applies to the "summarize_text_content" tool node.
> **Note**:
>
> 1. If you have two files with the same name located in different folders specified in the `additional_includes` field, and the file name is also specified as the entry of a tool node, the system will reference the **last one** it encounters in the `additional_includes` field.
> 2. If you have a file in the flow folder with the same name as a file specified in the `additional_includes` field, the system will prioritize the file listed in the `additional_includes` field.
Take the following YAML structure as an example:
```yaml
additional_includes:
- ../web-classification/prepare_examples.py
- ../tmp/prepare_examples.py
...
nodes:
- name: summarize_text_content
use_variants: true
- name: prepare_examples
type: python
source:
type: code
path: prepare_examples.py
inputs: {}
```
In this case, the system will use "../tmp/prepare_examples.py" as the entry file for the tool node "prepare_examples". Even if there is a file named "prepare_examples.py" in the flow folder, the system will still use the file "../tmp/prepare_examples.py" specified in the `additional_includes` field.
> Tips:
> The additional includes feature can significantly streamline your workflow by eliminating the need to manually handle these references.
> 1. To get a hands-on experience with this feature, practice with our sample [flow-with-additional-includes](https://github.com/microsoft/promptflow/tree/main/examples/flows/standard/flow-with-additional-includes).
> 2. You can learn more about [How the 'additional includes' flow operates during the transition to the cloud](../../cloud/azureai/quick-start/index.md#run-snapshot-of-the-flow-with-additional-includes). | promptflow/docs/how-to-guides/develop-a-flow/referencing-external-files-or-folders-in-a-flow.md/0 | {
"file_path": "promptflow/docs/how-to-guides/develop-a-flow/referencing-external-files-or-folders-in-a-flow.md",
"repo_id": "promptflow",
"token_count": 1055
} | 2 |
# Manage runs
:::{admonition} Experimental feature
This is an experimental feature, and may change at any time. Learn [more](faq.md#stable-vs-experimental).
:::
This documentation will walk you through how to manage your runs with CLI, SDK and VS Code Extension.
In general:
- For `CLI`, you can run `pf/pfazure run --help` in terminal to see the help messages.
- For `SDK`, you can refer to [Promptflow Python Library Reference](../reference/python-library-reference/promptflow.md) and check `PFClient.runs` for more run operations.
Let's take a look at the following topics:
- [Manage runs](#manage-runs)
- [Create a run](#create-a-run)
- [Get a run](#get-a-run)
- [Show run details](#show-run-details)
- [Show run metrics](#show-run-metrics)
- [Visualize a run](#visualize-a-run)
- [List runs](#list-runs)
- [Update a run](#update-a-run)
- [Archive a run](#archive-a-run)
- [Restore a run](#restore-a-run)
- [Delete a run](#delete-a-run)
## Create a run
::::{tab-set}
:::{tab-item} CLI
:sync: CLI
To create a run against bulk inputs, you can write the following YAML file.
```yaml
$schema: https://azuremlschemas.azureedge.net/promptflow/latest/Run.schema.json
flow: ../web_classification
data: ../webClassification1.jsonl
column_mapping:
url: "${data.url}"
variant: ${summarize_text_content.variant_0}
```
To create a run against an existing run, you can write the following YAML file.
```yaml
$schema: https://azuremlschemas.azureedge.net/promptflow/latest/Run.schema.json
flow: ../classification_accuracy_evaluation
data: ../webClassification1.jsonl
column_mapping:
groundtruth: "${data.answer}"
prediction: "${run.outputs.category}"
run: <existing-flow-run-name>
```
See [here](https://aka.ms/pf/column-mapping) for detailed information on column mapping.
You can find additional information about the run YAML schema in [Run YAML Schema](../reference/run-yaml-schema-reference.md).
After preparing the yaml file, use the CLI command below to create them:
```bash
# create the flow run
pf run create -f <path-to-flow-run>
# create the flow run and stream output
pf run create -f <path-to-flow-run> --stream
```
The expected result is as follows if the run is created successfully.
![img](../media/how-to-guides/run_create.png)
:::
:::{tab-item} SDK
:sync: SDK
Using SDK, create `Run` object and submit it with `PFClient`. The following code snippet shows how to import the required class and create the run:
```python
from promptflow import PFClient
from promptflow.entities import Run
# Get a pf client to manage runs
pf = PFClient()
# Initialize a Run object
run = Run(
flow="<path-to-local-flow>",
# run flow against local data or existing run, only one of data & run can be specified.
data="<path-to-data>",
run="<existing-run-name>",
column_mapping={"url": "${data.url}"},
variant="${summarize_text_content.variant_0}"
)
# Create the run
result = pf.runs.create_or_update(run)
print(result)
```
:::
:::{tab-item} VS Code Extension
:sync: VS Code Extension
You can click on the actions on the top of the default yaml editor or the visual editor for the flow.dag.yaml files to trigger flow batch runs.
![img](../media/how-to-guides/vscode_batch_run_yaml.png)
![img](../media/how-to-guides/vscode_batch_run_visual.png)
:::
::::
## Get a run
::::{tab-set}
:::{tab-item} CLI
:sync: CLI
Get a run in CLI with JSON format.
```bash
pf run show --name <run-name>
```
![img](../media/how-to-guides/run_show.png)
:::
:::{tab-item} SDK
:sync: SDK
Show run with `PFClient`
```python
from promptflow import PFClient
# Get a pf client to manage runs
pf = PFClient()
# Get and print the run
run = pf.runs.get(name="<run-name>")
print(run)
```
:::
:::{tab-item} VS Code Extension
:sync: VSC
![img](../media/how-to-guides/vscode_run_detail.png)
:::
::::
## Show run details
::::{tab-set}
:::{tab-item} CLI
:sync: CLI
Get run details with TABLE format.
```bash
pf run show-details --name <run-name>
```
![img](../media/how-to-guides/run_show_details.png)
:::
:::{tab-item} SDK
:sync: SDK
Show run details with `PFClient`
```python
from promptflow import PFClient
from tabulate import tabulate
# Get a pf client to manage runs
pf = PFClient()
# Get and print the run details
max_results = 10
run_details = pf.runs.get_details(name="<run-name>")
print(tabulate(run_details.head(max_results), headers="keys", tablefmt="grid"))
```
:::
:::{tab-item} VS Code Extension
:sync: VSC
![img](../media/how-to-guides/vscode_run_detail.png)
:::
::::
## Show run metrics
::::{tab-set}
:::{tab-item} CLI
:sync: CLI
Get run metrics with JSON format.
```bash
pf run show-metrics --name <run-name>
```
![img](../media/how-to-guides/run_show_metrics.png)
:::
:::{tab-item} SDK
:sync: SDK
Show run metrics with `PFClient`
```python
from promptflow import PFClient
import json
# Get a pf client to manage runs
pf = PFClient()
# Get and print the run metrics
metrics = pf.runs.get_metrics(name="<run-name>")
print(json.dumps(metrics, indent=4))
```
:::
::::
## Visualize a run
::::{tab-set}
:::{tab-item} CLI
:sync: CLI
Visualize run in browser.
```bash
pf run visualize --names <run-name>
```
A browser will open and display run outputs.
![img](../media/how-to-guides/run_visualize.png)
:::
:::{tab-item} SDK
:sync: SDK
Visualize run with `PFClient`
```python
from promptflow import PFClient
# Get a pf client to manage runs
pf = PFClient()
# Visualize the run
pf.runs.visualize(runs="<run-name>")
```
:::
:::{tab-item} VS Code Extension
:sync: VSC
On the VS Code primary sidebar > the prompt flow pane, there is a run list. It will list all the runs on your machine. Select one or more items and click the "visualize" button on the top-right to visualize the local runs.
![img](../media/how-to-guides/vscode_run_actions.png)
:::
::::
## List runs
::::{tab-set}
:::{tab-item} CLI
:sync: CLI
List runs with JSON format.
```bash
pf run list
```
![img](../media/how-to-guides/run_list.png)
:::
:::{tab-item} SDK
:sync: SDK
List with `PFClient`
```python
from promptflow import PFClient
# Get a pf client to manage runs
pf = PFClient()
# list runs
runs = pf.runs.list()
print(runs)
```
:::
:::{tab-item} VS Code Extension
:sync: VSC
On the VS Code primary sidebar > the prompt flow pane, there is a run list. It will list all the runs on your machine. Hover on it to view more details.
![img](../media/how-to-guides/vscode_list_runs.png)
:::
::::
## Update a run
::::{tab-set}
:::{tab-item} CLI
:sync: CLI
Update run properties such as the display name.
```bash
pf run update --name <run-name> --set display_name=new_display_name
```
:::
:::{tab-item} SDK
:sync: SDK
Update run with `PFClient`
```python
from promptflow import PFClient
# Get a pf client to manage runs
pf = PFClient()
# Update the run and print it
run = pf.runs.update(name="<run-name>", display_name="new_display_name")
print(run)
```
:::
::::
## Archive a run
::::{tab-set}
:::{tab-item} CLI
:sync: CLI
Archive the run so it won't show in run list results.
```bash
pf run archive --name <run-name>
```
:::
:::{tab-item} SDK
:sync: SDK
Archive with `PFClient`
```python
from promptflow import PFClient
# Get a pf client to manage runs
pf = PFClient()
# archive a run
pf.runs.archive(name="<run-name>")
```
:::
:::{tab-item} VS Code Extension
:sync: VSC
![img](../media/how-to-guides/vscode_run_actions.png)
:::
::::
## Restore a run
::::{tab-set}
:::{tab-item} CLI
:sync: CLI
Restore an archived run so it can show in run list results.
```bash
pf run restore --name <run-name>
```
:::
:::{tab-item} SDK
:sync: SDK
Restore with `PFClient`
```python
from promptflow import PFClient
# Get a pf client to manage runs
pf = PFClient()
# restore a run
pf.runs.restore(name="<run-name>")
```
:::
::::
## Delete a run
::::{tab-set}
:::{tab-item} CLI
:sync: CLI
Caution: `pf run delete` is irreversible. This operation will delete the run permanently from your local disk. Both the run entity and the output data will be deleted.
Delete will fail if the run name is not valid.
```bash
pf run delete --name <run-name>
```
:::
:::{tab-item} SDK
:sync: SDK
Delete with `PFClient`
```python
from promptflow import PFClient
# Get a pf client to manage runs
pf = PFClient()
# delete a run
pf.runs.delete(name="<run-name>")
```
:::
:::: | promptflow/docs/how-to-guides/manage-runs.md/0 | {
"file_path": "promptflow/docs/how-to-guides/manage-runs.md",
"repo_id": "promptflow",
"token_count": 2998
} | 3 |
# Avoid circular dependencies: Use import 'from promptflow._internal' instead of 'from promptflow'
# since the code here is in promptflow namespace as well
from promptflow._internal import tool
from promptflow.tools.common import render_jinja_template
@tool
def render_template_jinja2(template: str, **kwargs) -> str:
return render_jinja_template(template, trim_blocks=True, keep_trailing_newline=True, **kwargs)
| promptflow/src/promptflow-tools/promptflow/tools/template_rendering.py/0 | {
"file_path": "promptflow/src/promptflow-tools/promptflow/tools/template_rendering.py",
"repo_id": "promptflow",
"token_count": 117
} | 4 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from promptflow._sdk._configuration import Configuration
# This logic is copied from: https://github.com/microsoft/knack/blob/dev/knack/help.py
# Will print privacy message and welcome when user run `pf` command.
PRIVACY_STATEMENT = """
Welcome to prompt flow!
---------------------
Use `pf -h` to see available commands or go to https://aka.ms/pf-cli.
Telemetry
---------
The prompt flow CLI collects usage data in order to improve your experience.
The data is anonymous and does not include commandline argument values.
The data is collected by Microsoft.
You can change your telemetry settings with `pf config`.
"""
WELCOME_MESSAGE = r"""
____ _ __ _
| _ \ _ __ ___ _ __ ___ _ __ | |_ / _| | _____ __
| |_) | '__/ _ \| '_ ` _ \| '_ \| __| | |_| |/ _ \ \ /\ / /
| __/| | | (_) | | | | | | |_) | |_ | _| | (_) \ V V /
|_| |_| \___/|_| |_| |_| .__/ \__| |_| |_|\___/ \_/\_/
|_|
Welcome to the cool prompt flow CLI!
Use `pf --version` to display the current version.
Here are the base commands:
"""
def show_privacy_statement():
config = Configuration.get_instance()
ran_before = config.get_config("first_run")
if not ran_before:
print(PRIVACY_STATEMENT)
config.set_config("first_run", True)
def show_welcome_message():
print(WELCOME_MESSAGE)
| promptflow/src/promptflow/promptflow/_cli/_pf/help.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_cli/_pf/help.py",
"repo_id": "promptflow",
"token_count": 563
} | 5 |
{
"package": {},
"code": {
{% for key, prompt_obj in prompt_params.items() %}
"{{ key }}": {
"type": "prompt",
"inputs": {
{% for input_name, value in prompt_obj.get("inputs", {}).items() %}
"{{ input_name }}": {
"type": [
{% for typ in value["type"] %}
"{{ typ.value }}"
{% endfor %}
]
}{{ "," if not loop.last else "" }}
{% endfor %}
},
"source": "{{ prompt_obj.source }}"
},
{% endfor %}
"{{ tool_file }}": {
"type": "python",
"inputs": {
{% for arg, typ in tool_meta_args.items() %}
"{{ arg }}": {
"type": [
"{{ typ }}"
]
},
{% endfor %}
"connection": {
"type": [
"CustomConnection"
]
}
},
"function": "{{ tool_function }}",
"source": "{{ tool_file }}"
}
}
}
| promptflow/src/promptflow/promptflow/_cli/data/entry_flow/flow.tools.json.jinja2/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_cli/data/entry_flow/flow.tools.json.jinja2",
"repo_id": "promptflow",
"token_count": 779
} | 6 |
$schema: https://azuremlschemas.azureedge.net/promptflow/latest/Flow.schema.json
inputs:
text:
type: string
outputs:
output_prompt:
type: string
reference: ${echo_my_prompt.output}
nodes:
- name: hello_prompt
type: prompt
source:
type: code
path: hello.jinja2
inputs:
text: ${inputs.text}
- name: echo_my_prompt
type: python
source:
type: code
path: hello.py
inputs:
input1: ${hello_prompt.output}
environment:
python_requirements_txt: requirements.txt
| promptflow/src/promptflow/promptflow/_cli/data/standard_flow/flow.dag.yaml/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_cli/data/standard_flow/flow.dag.yaml",
"repo_id": "promptflow",
"token_count": 205
} | 7 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from contextvars import ContextVar
from typing import Type, TypeVar
T = TypeVar("T")
class ThreadLocalSingleton:
# Use context variable to enable thread local singleton
# See reference: https://docs.python.org/3/library/contextvars.html#contextvars.ContextVar
CONTEXT_VAR_NAME = "ThreadLocalSingleton"
context_var = ContextVar(CONTEXT_VAR_NAME, default=None)
@classmethod
def active_instance(cls: Type[T]) -> T:
return cls.context_var.get()
@classmethod
def active(cls) -> bool:
return cls.active_instance() is not None
def _activate_in_context(self, force=False):
instance = self.active_instance()
if instance is not None and instance is not self and not force:
raise NotImplementedError(f"Cannot set active since there is another active instance: {instance}")
self.context_var.set(self)
def _deactivate_in_context(self):
self.context_var.set(None)
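# Illustrative usage (not part of the original module; `Tracer` is a
# hypothetical subclass):
#
#     class Tracer(ThreadLocalSingleton):
#         CONTEXT_VAR_NAME = "Tracer"
#         context_var = ContextVar(CONTEXT_VAR_NAME, default=None)
#
#     tracer = Tracer()
#     tracer._activate_in_context()
#     assert Tracer.active_instance() is tracer
#     tracer._deactivate_in_context()
#     assert not Tracer.active()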
| promptflow/src/promptflow/promptflow/_core/thread_local_singleton.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_core/thread_local_singleton.py",
"repo_id": "promptflow",
"token_count": 370
} | 8 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from sqlalchemy import TEXT, Column
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import declarative_base
from promptflow._sdk._constants import EXP_NODE_RUN_TABLE_NAME, ExperimentNodeRunStatus
from promptflow._sdk._errors import ExperimentNodeRunNotFoundError
from .retry import sqlite_retry
from .session import mgmt_db_session
Base = declarative_base()
class ExperimentNodeRun(Base):
__tablename__ = EXP_NODE_RUN_TABLE_NAME
run_id = Column(TEXT, primary_key=True)
snapshot_id = Column(TEXT)
node_name = Column(TEXT, nullable=False)
experiment_name = Column(TEXT, nullable=False)
status = Column(TEXT, nullable=False)
# schema version, increase the version number when you change the schema
__pf_schema_version__ = "1"
@staticmethod
@sqlite_retry
def create_or_update(node_run: "ExperimentNodeRun") -> None:
session = mgmt_db_session()
run_id = node_run.run_id
try:
session.add(node_run)
session.commit()
except IntegrityError:
session = mgmt_db_session()
# Remove the _sa_instance_state
update_dict = {k: v for k, v in node_run.__dict__.items() if not k.startswith("_")}
session.query(ExperimentNodeRun).filter(ExperimentNodeRun.run_id == run_id).update(update_dict)
session.commit()
@staticmethod
@sqlite_retry
def delete(snapshot_id: str) -> None:
with mgmt_db_session() as session:
session.query(ExperimentNodeRun).filter(ExperimentNodeRun.snapshot_id == snapshot_id).delete()
session.commit()
@staticmethod
@sqlite_retry
def get(run_id: str, raise_error=True) -> "ExperimentNodeRun":
with mgmt_db_session() as session:
            node_run = session.query(ExperimentNodeRun).filter(ExperimentNodeRun.run_id == run_id).first()
            if node_run is None and raise_error:
                raise ExperimentNodeRunNotFoundError(f"Node run {run_id!r} not found.")
            return node_run
@staticmethod
@sqlite_retry
def get_completed_node_by_snapshot_id(
snapshot_id: str, experiment_name: str, raise_error=True
) -> "ExperimentNodeRun":
with mgmt_db_session() as session:
node_run = (
session.query(ExperimentNodeRun)
.filter(
ExperimentNodeRun.snapshot_id == snapshot_id,
ExperimentNodeRun.experiment_name == experiment_name,
ExperimentNodeRun.status == ExperimentNodeRunStatus.COMPLETED,
)
.first()
)
if node_run is None and raise_error:
raise ExperimentNodeRunNotFoundError(
f"Not found the completed node run with snapshot id {snapshot_id!r}."
)
return node_run
@staticmethod
@sqlite_retry
    def get_node_runs_by_experiment(experiment_name: str) -> list:
with mgmt_db_session() as session:
node_runs = (
session.query(ExperimentNodeRun).filter(ExperimentNodeRun.experiment_name == experiment_name).all()
)
return node_runs
@sqlite_retry
def update_status(self, status: str) -> None:
update_dict = {"status": status}
with mgmt_db_session() as session:
session.query(ExperimentNodeRun).filter(ExperimentNodeRun.run_id == self.run_id).update(update_dict)
session.commit()
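# Hedged usage sketch (illustrative addition, not part of the original module):
# recording a node run and marking it completed. The values below are hypothetical,
# and this assumes the SDK's local SQLite management DB has been initialized.
if __name__ == "__main__":
    demo_run = ExperimentNodeRun(
        run_id="demo_run_id",
        snapshot_id="demo_snapshot_id",
        node_name="main",
        experiment_name="demo_experiment",
        status="Queued",  # a real caller would use an ExperimentNodeRunStatus member
    )
    ExperimentNodeRun.create_or_update(demo_run)
    ExperimentNodeRun.get("demo_run_id").update_status(ExperimentNodeRunStatus.COMPLETED)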
| promptflow/src/promptflow/promptflow/_sdk/_orm/experiment_node_run.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_sdk/_orm/experiment_node_run.py",
"repo_id": "promptflow",
"token_count": 1544
} | 9 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from flask import jsonify, make_response, request
from flask_restx import fields
from promptflow._sdk._service import Namespace, Resource
from promptflow._sdk._service.utils.utils import build_pfs_user_agent, local_user_only
from promptflow._sdk._telemetry import ActivityCompletionStatus, ActivityType
from promptflow._utils.utils import camel_to_snake
from promptflow.exceptions import UserErrorException
api = Namespace("Telemetries", description="Telemetry Management")
class EventType:
START = "Start"
END = "End"
class AllowedActivityName:
FLOW_TEST = "pf.flow.test"
FLOW_NODE_TEST = "pf.flow.node_test"
GENERATE_TOOL_META = "pf.flow._generate_tools_meta"
REQUEST_ID_KEY = "x-ms-promptflow-request-id"
def _dict_camel_to_snake(data):
if isinstance(data, dict):
result = {}
for key, value in data.items():
result[camel_to_snake(key)] = _dict_camel_to_snake(value)
return result
else:
return data
def parse_activity_info(metadata, first_call, user_agent, request_id):
    return {
        "request_id": request_id,
        "first_call": first_call,
        "user_agent": user_agent,
        **_dict_camel_to_snake(metadata),
    }
def validate_metadata(value: dict) -> dict:
allowed_activity_names = [
AllowedActivityName.FLOW_TEST,
AllowedActivityName.FLOW_NODE_TEST,
AllowedActivityName.GENERATE_TOOL_META,
]
if value.get("activityName", None) not in allowed_activity_names:
raise UserErrorException(f"metadata.activityName must be one of {', '.join(allowed_activity_names)}.")
allowed_activity_types = [
ActivityType.INTERNALCALL,
ActivityType.PUBLICAPI,
]
if value.get("activityType") not in allowed_activity_types:
raise UserErrorException(f"metadata.activityType must be one of {', '.join(allowed_activity_types)}")
return value
def validate_metadata_based_on_event_type(metadata: dict, event_type: str):
if event_type == EventType.END:
if not all(
key in metadata
for key in (
"completionStatus", # End event should have completionStatus
"durationMs", # End event should have durationMs
)
):
missing_fields = {"completionStatus", "durationMs"} - set(metadata.keys())
raise UserErrorException(f"Missing required fields in telemetry metadata: {', '.join(missing_fields)}")
if metadata.get("completionStatus") == ActivityCompletionStatus.FAILURE:
if not all(
key in metadata
for key in (
"errorCategory", # Failure event should have errorCategory
"errorType", # Failure event should have errorType
"errorTarget", # Failure event should have errorTarget
"errorMessage", # Failure event should have errorMessage
)
):
missing_fields = {"errorCategory", "errorType", "errorTarget", "errorMessage"} - set(metadata.keys())
raise UserErrorException(f"Missing required fields in telemetry payload: {', '.join(missing_fields)}")
def validate_event_type(value) -> str:
if value not in (EventType.START, EventType.END):
        raise ValueError(f"Event type must be either {EventType.START} or {EventType.END}.")
return value
metadata_model = api.model(
"Metadata",
{
"activityName": fields.String(
required=True,
description="The name of the activity.",
enum=[
AllowedActivityName.FLOW_TEST,
AllowedActivityName.FLOW_NODE_TEST,
AllowedActivityName.GENERATE_TOOL_META,
],
),
"activityType": fields.String(required=True, description="The type of the activity."),
"completionStatus": fields.String(
required=False,
description="The completion status of the activity.",
enum=[ActivityCompletionStatus.SUCCESS, ActivityCompletionStatus.FAILURE],
),
"durationMs": fields.Integer(required=False, description="The duration of the activity in milliseconds."),
"errorCategory": fields.String(required=False, description="The error category of the activity."),
"errorType": fields.String(required=False, description="The error type of the activity."),
"errorTarget": fields.String(required=False, description="The error target of the activity."),
"errorMessage": fields.String(required=False, description="The error message of the activity."),
"errorDetails": fields.String(required=False, description="The error details of the activity."),
},
)
telemetry_model = api.model(
"Telemetry",
{
"eventType": fields.String(
required=True,
description="The event type of the telemetry.",
enum=[EventType.START, EventType.END],
),
"timestamp": fields.DateTime(required=True, description="The timestamp of the telemetry."),
"firstCall": fields.Boolean(
required=False,
default=True,
description="Whether current activity is the first activity in the call chain.",
),
"metadata": fields.Nested(metadata_model),
},
)
@api.route("/")
class Telemetry(Resource):
@api.header(REQUEST_ID_KEY, type=str)
@api.response(code=200, description="Create telemetry record")
@api.response(code=400, description="Input payload validation failed")
@api.doc(description="Create telemetry record")
@api.expect(telemetry_model)
@local_user_only
@api.response(code=403, description="Telemetry is disabled or X-Remote-User is not set.")
def post(self):
from promptflow._sdk._telemetry import get_telemetry_logger, is_telemetry_enabled
from promptflow._sdk._telemetry.activity import log_activity_end, log_activity_start
if not is_telemetry_enabled():
return make_response(
jsonify(
{
"message": "Telemetry is disabled, you may re-enable it "
"via `pf config set telemetry.enabled=true`."
}
),
403,
)
request_id = request.headers.get(REQUEST_ID_KEY)
try:
validate_metadata_based_on_event_type(api.payload["metadata"], api.payload["eventType"])
except UserErrorException as exception:
return make_response(
jsonify({"errors": {"metadata": str(exception)}, "message": "Input payload validation failed"}), 400
)
activity_info = parse_activity_info(
metadata=api.payload["metadata"],
first_call=api.payload.get("firstCall", True),
user_agent=build_pfs_user_agent(),
request_id=request_id,
)
if api.payload["eventType"] == EventType.START:
log_activity_start(activity_info, get_telemetry_logger())
elif api.payload["eventType"] == EventType.END:
log_activity_end(activity_info, get_telemetry_logger())
return jsonify(
{
"status": ActivityCompletionStatus.SUCCESS,
}
)
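# Hedged usage sketch (illustrative addition, not part of the original module):
# an example client POST against a locally running prompt flow service. The port,
# mount path, and request id below are assumptions for illustration; running this
# requires a live service and the `requests` package.
if __name__ == "__main__":
    import requests
    payload = {
        "eventType": EventType.START,
        "timestamp": "2024-01-01T00:00:00Z",
        "firstCall": True,
        "metadata": {
            "activityName": AllowedActivityName.FLOW_TEST,
            "activityType": ActivityType.PUBLICAPI,
        },
    }
    response = requests.post(
        "http://localhost:23333/v1.0/Telemetries/",  # assumed local service address
        json=payload,
        headers={REQUEST_ID_KEY: "00000000-0000-0000-0000-000000000000"},
    )
    print(response.status_code, response.json())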
| promptflow/src/promptflow/promptflow/_sdk/_service/apis/telemetry.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_sdk/_service/apis/telemetry.py",
"repo_id": "promptflow",
"token_count": 3067
} | 10 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from .run_submitter import RunSubmitter
from .test_submitter import TestSubmitter
from .utils import (
overwrite_connections,
overwrite_flow,
overwrite_variant,
remove_additional_includes,
variant_overwrite_context,
)
__all__ = [
"RunSubmitter",
"TestSubmitter",
"overwrite_variant",
"variant_overwrite_context",
"remove_additional_includes",
"overwrite_connections",
"overwrite_flow",
]
| promptflow/src/promptflow/promptflow/_sdk/_submitter/__init__.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_sdk/_submitter/__init__.py",
"repo_id": "promptflow",
"token_count": 185
} | 11 |
# syntax=docker/dockerfile:1
{% if env.image %}
FROM {{env.image}}
{% else %}
{% if show_comment %}
# consider using mcr.microsoft.com/azureml/openmpi4.1.0-ubuntu20.04:latest; the current image is based on Debian 11
{% endif %}
FROM docker.io/continuumio/miniconda3:latest
{% endif %}
WORKDIR /
{% if env.python_requirements_txt %}
COPY ./flow/{{env.python_requirements_txt}} /flow/{{env.python_requirements_txt}}
{% endif %}
# gcc is required to build psutil from source
RUN apt-get update && apt-get install -y runit gcc
# create conda environment
{% if env.conda_file %}
COPY ./flow/{{env.conda_file}} /flow/{{env.conda_file}}
RUN conda env create -n {{env.conda_env_name}} -f flow/{{env.conda_file}} -q && \
{% else %}
RUN conda create -n {{env.conda_env_name}} python=3.9.16 pip=23.0.1 -q -y && \
{% endif %}
conda run -n {{env.conda_env_name}} \
{% if env.python_requirements_txt %}
pip install -r /flow/{{env.python_requirements_txt}} && \
{% else %}
{% if env.sdk_version %}
pip install promptflow=={{env.sdk_version}} \
{% else %}
pip install promptflow \
{% endif %}
promptflow-tools && \
{% endif %}
conda run -n {{env.conda_env_name}} pip install keyrings.alt && \
conda run -n {{env.conda_env_name}} pip install gunicorn==20.1.0 && \
conda run -n {{env.conda_env_name}} pip cache purge && \
conda clean -a -y
COPY ./flow /flow
{% if env.setup_sh %}
RUN conda run -n {{env.conda_env_name}} sh /flow/{{ env.setup_sh }}
{% endif %}
EXPOSE 8080
COPY ./connections/* /connections/
# reset runsvdir
RUN rm -rf /var/runit
COPY ./runit /var/runit
# grant permission
RUN chmod -R +x /var/runit
COPY ./start.sh /
CMD ["bash", "./start.sh"]
| promptflow/src/promptflow/promptflow/_sdk/data/docker/Dockerfile.jinja2/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_sdk/data/docker/Dockerfile.jinja2",
"repo_id": "promptflow",
"token_count": 680
} | 12 |
import base64
import json
import re
import streamlit as st
from bs4 import BeautifulSoup, NavigableString, Tag
from promptflow._utils.multimedia_utils import MIME_PATTERN, is_multimedia_dict
def show_image(image, key=None):
    col1, _ = st.columns(2)
    with col1:
        if not image.startswith("data:image"):
            # A bare base64 payload: prepend the MIME key (e.g. "data:image/jpg;base64")
            # to form a data URI that streamlit can render.
            st.image(key + "," + image, use_column_width="auto")
        else:
            st.image(image, use_column_width="auto")
def json_dumps(value):
try:
return json.dumps(value, ensure_ascii=False)
except Exception:
return value
def is_list_contains_rich_text(rich_text):
result = False
for item in rich_text:
if isinstance(item, list):
result |= is_list_contains_rich_text(item)
elif isinstance(item, dict):
result |= is_dict_contains_rich_text(item)
else:
if isinstance(item, str) and item.startswith("data:image"):
result = True
return result
def is_dict_contains_rich_text(rich_text):
result = False
for rich_text_key, rich_text_value in rich_text.items():
if isinstance(rich_text_value, list):
result |= is_list_contains_rich_text(rich_text_value)
elif isinstance(rich_text_value, dict):
result |= is_dict_contains_rich_text(rich_text_value)
elif re.match(MIME_PATTERN, rich_text_key) or (
isinstance(rich_text_value, str) and rich_text_value.startswith("data:image")
):
result = True
return result
def item_render_message(value, key=None):
if key and re.match(MIME_PATTERN, key):
show_image(value, key)
elif isinstance(value, str) and value.startswith("data:image"):
show_image(value)
else:
if key is None:
st.markdown(f"{json_dumps(value)},")
else:
st.markdown(f"{key}: {json_dumps(value)},")
def list_iter_render_message(message_items):
    if is_list_contains_rich_text(message_items):
        st.markdown("[ ")
        for item in message_items:
            # elif is required here: with two separate ifs, a list item would also
            # fall through to the else branch and be rendered twice.
            if isinstance(item, list):
                list_iter_render_message(item)
            elif isinstance(item, dict):
                dict_iter_render_message(item)
            else:
                item_render_message(item)
        st.markdown("], ")
else:
st.markdown(f"{json_dumps(message_items)},")
def dict_iter_render_message(message_items):
if is_multimedia_dict(message_items):
key = list(message_items.keys())[0]
value = message_items[key]
show_image(value, key)
elif is_dict_contains_rich_text(message_items):
st.markdown("{ ")
for key, value in message_items.items():
if re.match(MIME_PATTERN, key):
show_image(value, key)
else:
if isinstance(value, list):
st.markdown(f"{key}: ")
list_iter_render_message(value)
elif isinstance(value, dict):
st.markdown(f"{key}: ")
dict_iter_render_message(value)
else:
item_render_message(value, key)
st.markdown("}, ")
else:
st.markdown(f"{json_dumps(message_items)},")
def render_single_list_message(message_items):
# This function is added for chat flow with only single input and single output.
# So that we can show the message directly without the list and dict wrapper.
for item in message_items:
if isinstance(item, list):
render_single_list_message(item)
elif isinstance(item, dict):
render_single_dict_message(item)
elif isinstance(item, str):
st.text(item)
def render_single_dict_message(message_items):
    # This function is added for chat flow with only single input and single output.
    # So that we can show the message directly without the list and dict wrapper.
    for key, value in message_items.items():
        if re.match(MIME_PATTERN, key):
            show_image(value, key)
        elif isinstance(value, list):
            render_single_list_message(value)
        elif isinstance(value, dict):
            render_single_dict_message(value)
        else:
            item_render_message(value, key)
def extract_content(node):
if isinstance(node, NavigableString):
text = node.strip()
if text:
return [text]
elif isinstance(node, Tag):
if node.name == "img":
prefix, base64_str = node["src"].split(",", 1)
return [{prefix: base64_str}]
else:
result = []
for child in node.contents:
result.extend(extract_content(child))
return result
return []
def parse_list_from_html(html_content):
"""
Parse the html content to a list of strings and images.
"""
soup = BeautifulSoup(html_content, "html.parser")
result = []
for p in soup.find_all("p"):
result.extend(extract_content(p))
return result
def parse_image_content(image_content, image_type):
if image_content is not None:
file_contents = image_content.read()
image_content = base64.b64encode(file_contents).decode("utf-8")
prefix = f"data:{image_type};base64"
return {prefix: image_content}
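# Hedged usage sketch (illustrative addition, not part of the original module):
# parsing a small HTML fragment into the text/image list that the render helpers
# consume. The base64 payload below is a truncated placeholder, not a real image.
if __name__ == "__main__":
    demo_html = '<p>hello</p><p><img src="data:image/png;base64,iVBORw0KGgo="></p>'
    print(parse_list_from_html(demo_html))
    # Expected shape: ['hello', {'data:image/png;base64': 'iVBORw0KGgo='}]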
| promptflow/src/promptflow/promptflow/_sdk/data/executable/utils.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_sdk/data/executable/utils.py",
"repo_id": "promptflow",
"token_count": 2495
} | 13 |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from datetime import datetime
from typing import List
from promptflow._sdk._constants import MAX_LIST_CLI_RESULTS
from promptflow._sdk._orm import Connection as ORMConnection
from promptflow._sdk._telemetry import ActivityType, TelemetryMixin, monitor_operation
from promptflow._sdk._utils import safe_parse_object_list
from promptflow._sdk.entities._connection import _Connection
class ConnectionOperations(TelemetryMixin):
"""ConnectionOperations."""
def __init__(self, **kwargs):
super().__init__(**kwargs)
@monitor_operation(activity_name="pf.connections.list", activity_type=ActivityType.PUBLICAPI)
def list(
self,
max_results: int = MAX_LIST_CLI_RESULTS,
all_results: bool = False,
) -> List[_Connection]:
"""List connections.
:param max_results: Max number of results to return.
:type max_results: int
:param all_results: Return all results.
:type all_results: bool
        :return: List of connection objects.
:rtype: List[~promptflow.sdk.entities._connection._Connection]
"""
orm_connections = ORMConnection.list(max_results=max_results, all_results=all_results)
return safe_parse_object_list(
obj_list=orm_connections,
parser=_Connection._from_orm_object,
message_generator=lambda x: f"Failed to load connection {x.connectionName}, skipped.",
)
@monitor_operation(activity_name="pf.connections.get", activity_type=ActivityType.PUBLICAPI)
def get(self, name: str, **kwargs) -> _Connection:
"""Get a connection entity.
:param name: Name of the connection.
:type name: str
:return: connection object retrieved from the database.
:rtype: ~promptflow.sdk.entities._connection._Connection
"""
return self._get(name, **kwargs)
def _get(self, name: str, **kwargs) -> _Connection:
with_secrets = kwargs.get("with_secrets", False)
raise_error = kwargs.get("raise_error", True)
orm_connection = ORMConnection.get(name, raise_error)
if orm_connection is None:
return None
if with_secrets:
return _Connection._from_orm_object_with_secrets(orm_connection)
return _Connection._from_orm_object(orm_connection)
@monitor_operation(activity_name="pf.connections.delete", activity_type=ActivityType.PUBLICAPI)
def delete(self, name: str) -> None:
"""Delete a connection entity.
:param name: Name of the connection.
:type name: str
"""
ORMConnection.delete(name)
@monitor_operation(activity_name="pf.connections.create_or_update", activity_type=ActivityType.PUBLICAPI)
def create_or_update(self, connection: _Connection, **kwargs):
"""Create or update a connection.
        :param connection: Connection object to create or update.
        :type connection: ~promptflow.sdk.entities._connection._Connection
        :return: The created or updated connection.
        :rtype: ~promptflow.sdk.entities._connection._Connection
        """
orm_object = connection._to_orm_object()
now = datetime.now().isoformat()
if orm_object.createdDate is None:
orm_object.createdDate = now
orm_object.lastModifiedDate = now
ORMConnection.create_or_update(orm_object)
return self.get(connection.name)
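# Hedged usage sketch (illustrative addition, not part of the original module):
# listing and fetching connections through the operations class. Assumes a local
# store with existing connections; the name "my_connection" is hypothetical.
if __name__ == "__main__":
    ops = ConnectionOperations()
    for connection in ops.list(max_results=10):
        print(connection.name)
    print(ops.get("my_connection", raise_error=False))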
| promptflow/src/promptflow/promptflow/_sdk/operations/_connection_operations.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_sdk/operations/_connection_operations.py",
"repo_id": "promptflow",
"token_count": 1297
} | 14 |
import re
from dataclasses import dataclass
from enum import Enum
from functools import partial
from pathlib import Path
from typing import Any, Callable
from promptflow._utils.multimedia_utils import is_multimedia_dict
class ResourceType(Enum):
"""
Enumeration of different types of multimedia resources.
We support path, URL, and base64 data.
"""
PATH = "path"
URL = "url"
BASE64 = "base64"
@dataclass
class MultimediaInfo:
"""
Data class that holds information about a multimedia resource.
"""
mime_type: str # The MIME type of the multimedia resource.
resource_type: ResourceType # The type of the resource as defined in ResourceType.
content: str # The content of the multimedia resource (path, URL, or base64 string).
class AbstractMultimediaFormatAdapter:
"""
Abstract base class for adapting multimedia formats.
This class provides an interface for extracting multimedia information
from various data formats or constructing data formats from multimedia information.
Subclasses should implement methods for specific contract version.
A MultimediaInfo object contains the mime_type, resource_type, and the actual content
of the multimedia resource.
The multimedia data is typically represented as a dictionary
with keys and values conforming to a specific multimedia data contract.
One multimedia data example from 20231201 version: {"data:image/jpg;path": "logo.jpg"}
"""
# Check if the original_data is a multimedia format according to the current contract version.
def is_valid_format(self, original_data: Any):
raise NotImplementedError()
def extract_info(self, original_data: Any) -> MultimediaInfo:
"""
Get the MultimediaInfo from the original data. Will include mime_type, resource_type, and content.
Below is an example for the 20231201 version:
{"data:image/jpg;path": "logo.jpg"} -> "image/jpg", "path", "logo.jpg"
"""
raise NotImplementedError()
def create_data(self, info: MultimediaInfo) -> Any:
"""
Create multimedia data from info. Below is an example for the 20231201 version:
"image/jpg", "path", "logo.jpg" -> {"data:image/jpg;path": "logo.jpg"}
"""
raise NotImplementedError()
class MultimediaFormatAdapter20231201(AbstractMultimediaFormatAdapter):
"""
    The 20231201 version is our first contract version; it supports text and images (path/url/base64).
20231201 is the version number assigned by the customer in the YAML file.
Path format example: {"data:image/jpg;path": "logo.jpg"}
Url format example: {"data:image/jpg;url": "https://example.com/logo.jpg"}
Base64 format example: {"data:image/jpg;base64": "base64 string"}
"""
MIME_PATTERN = re.compile(r"^data:(.*);(path|base64|url)$")
def is_valid_format(self, original_data: Any):
return isinstance(original_data, dict) and is_multimedia_dict(original_data)
def extract_info(self, original_data: Any) -> MultimediaInfo:
if not self.is_valid_format(original_data):
return None
for key in original_data:
match = re.match(self.MIME_PATTERN, key)
if match:
mime_type, resource_type = match.group(1), match.group(2)
content = original_data[key]
return MultimediaInfo(mime_type, ResourceType(resource_type), content)
return None
def create_data(self, info: MultimediaInfo):
return {f"data:{info.mime_type};{info.resource_type.value}": info.content}
class AbstractMultimediaInfoConverter:
def convert(self, info: MultimediaInfo) -> MultimediaInfo:
"""
Change info's mime type/resource type/content based on the client's logic.
For cases that do not need to be changed, just return the original info.
:param info: The MultimediaInfo to be converted.
:type info: MultimediaInfo
:return: The converted MultimediaInfo.
:rtype: MultimediaInfo
"""
raise NotImplementedError()
class MultimediaConverter:
def __init__(self, flow_file: Path):
"""
Initialize the MultimediaConverter.
:param flow_file: The path to the YAML file. The YAML content will be used to determine the contract version.
:type flow_file: Path
"""
# TODO: check yaml content to determine the current contract version.
# Different contract version will have different multimedia format.
# The version exists in the yaml file, so we need to load the yaml to get version and init converter.
self.format_adapter = MultimediaFormatAdapter20231201()
def convert_content_recursively(self, content: Any, client_converter: AbstractMultimediaInfoConverter):
"""
Recursively converts multimedia data format in content.
:param content: The object that may contain multimedia data.
:type content: Any
:param client_converter: The converter to modify multimedia info based on the client's logic.
:type client_converter: AbstractMultimediaInfoConverter
:return: The content with changed multimedia format.
:rtype: Any
"""
process_func = partial(self._convert_content, converter=client_converter)
return self._process_content_recursively(content, process_func=process_func)
def _convert_content(self, original_data: Any, converter: AbstractMultimediaInfoConverter):
if not self.format_adapter.is_valid_format(original_data):
return original_data
info = self.format_adapter.extract_info(original_data)
# When can't extract multimedia info from original_data, return original_data directly.
if info is None:
return original_data
info = converter.convert(info)
return self.format_adapter.create_data(info)
def _process_content_recursively(self, content: Any, process_func: Callable):
if isinstance(content, list):
return [self._process_content_recursively(item, process_func) for item in content]
elif isinstance(content, dict):
if self.format_adapter.is_valid_format(content):
return process_func(original_data=content)
else:
return {k: self._process_content_recursively(v, process_func) for k, v in content.items()}
else:
return content
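# Hedged usage sketch (illustrative addition, not part of the original module):
# a client converter that swaps path resources for a base64 placeholder, applied
# recursively. The flow file path, content, and placeholder are hypothetical.
class _PathToBase64Converter(AbstractMultimediaInfoConverter):
    def convert(self, info: MultimediaInfo) -> MultimediaInfo:
        if info.resource_type == ResourceType.PATH:
            # A real converter would read the file and base64-encode its bytes.
            return MultimediaInfo(info.mime_type, ResourceType.BASE64, "<base64 placeholder>")
        return info
if __name__ == "__main__":
    converter = MultimediaConverter(flow_file=Path("flow.dag.yaml"))
    content = {"question": "describe the image", "image": {"data:image/jpg;path": "logo.jpg"}}
    print(converter.convert_content_recursively(content, _PathToBase64Converter()))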
| promptflow/src/promptflow/promptflow/_utils/multimedia_data_converter.py/0 | {
"file_path": "promptflow/src/promptflow/promptflow/_utils/multimedia_data_converter.py",
"repo_id": "promptflow",
"token_count": 2299
} | 15 |