huawei-cloud-api-definitions-DataArtsStudio 0.1.20250411

Huawei Cloud API definitions, generated from OpenAPI spec
Documentation
1
{"base_path":"/","components":null,"consumes":"[\"application/json;charset=UTF-8\"]","definitions":{"Approver":{"description":"审批人","properties":{"approver_name":{"description":"审批人名称","type":"string"}},"required":["approver_name"],"type":"object"},"BasicInfo":{"description":"作业基本信息","properties":{"custom_fields":{"description":"用户自定义属性字段","maxLength":2048,"type":"object","x-example":"Map<String,String>"},"execute_user":{"description":"作业执行用户,必须是已存在的用户名。","type":"string"},"instance_timeout":{"default":60,"description":"实例超时时间,单位是分钟。","format":"int32","maximum":1440,"minimum":5,"type":"integer"},"owner":{"description":"作业责任人","type":"string"},"priority":{"default":0,"description":"作业优先级,0代表高优先级,1代表中优先级,2代表低优先级。","format":"int32","maximum":2,"minimum":0,"type":"integer"}},"type":"object"},"Condition":{"description":"依赖条件","properties":{"expression":{"description":"EL表达式,如果EL表达式的计算结果为true,则触发执行本节点。","type":"string"},"pre_node_name":{"description":"本节点依赖的前一个节点名称","type":"string"}},"type":"object"},"Cron":{"description":"当type为CRON时,配置调度频率、启动时间等信息","properties":{"depend_jobs":{"description":"依赖其它作业列表","items":{"$ref":"#/definitions/DependJob"},"type":"array"},"depend_pre_period":{"description":"是否依赖本作业上一个运行周期任务的执行结果","type":"boolean"},"end_time":{"description":"调度结束时间,采用ISO 8601时间表示方法,格式为yyyy-MM-dd'T'HH:mm:ssZ,例如2018-10-22T23:59:59+08表示的时间为2018年10月22日23时59分59秒,在正8区,即北京时区。如果结束时间不配置,作业会按照调度周期一直执行下去。","type":"string"},"expression":{"description":"Cron表达式,格式为\"<秒> <分> <时> <天> <月> <星期>\"","example":"0 0 0-23/1 * * ?","type":"string"},"expression_time_zone":{"description":"Cron表达式对应的时区信息,例如GMT+8。默认值:使用DataArts Studio服务端所在的时区。","example":"GMT+8","type":"string"},"period":{"description":"用于描述运行的间隔时间,格式为时间+时间单位。需要与expression中的表达式对应","example":"1 hours","type":"string"},"start_time":{"description":"调度开始时间,采用ISO 
8601时间表示方法,格式为yyyy-MM-dd'T'HH:mm:ssZ,例如2018-10-22T23:59:59+08表示的时间为2018年10月22日23时59分59秒,在正8区,即北京时区。","example":"2022-10-22T23:59:59+08","type":"string"}},"required":["start_time","expression","period"],"type":"object"},"DependJob":{"description":"依赖作业列表","properties":{"depend_fail_policy":{"default":"FAIL","description":"依赖作业任务执行失败处理策略:\n- FAIL:停止作业,设置作业为失败状态。\n- IGNORE:继续执行作业。\n- SUSPEND: 挂起作业。","enum":["FAIL","IGNORE","SUSPEND"],"type":"string"},"depend_period":{"default":"SAME_PERIOD","description":"依赖周期:\n- SAME_PERIOD:依赖被依赖作业的同周期任务的执行结果。\n- PRE_PERIOD:依赖被依赖作业的前一周期任务的执行结果。","enum":["SAME_PERIOD","PRE_PERIOD"],"type":"string"},"jobs":{"description":"依赖的作业名称列表,必须依赖已存在的作业。","items":{"description":"依赖的作业名称。","example":"job_a","type":"string"},"type":"array"}},"required":["jobs"],"type":"object"},"ErrorResponse":{"properties":{"error_code":{"description":"错误码","example":"DLS.1000","maxLength":128,"minLength":1,"type":"string"},"error_msg":{"description":"错误描述","example":"process failed","maxLength":10240,"minLength":1,"type":"string"}},"type":"object"},"Event":{"description":"实时作业节点事件触发配置","properties":{"channel":{"description":"DIS通道名称。通过DIS管理控制台获取通道名称:登录管理控制台。单击“数据接入服务”,左侧列表选择“通道管理”。通道管理页面中列出了用户拥有的通道","type":"string"},"concurrent":{"default":1,"description":"调度并发数","format":"int32","maximum":128,"minimum":1,"type":"integer"},"event_type":{"description":"事件类型。\n- KAFKA: 选择对应的连接名称与topic,当有新的kafka消息时将会触发作业运行一次\n- DIS: 当前只支持监听DIS通道的新上报数据事件,每上报一条数据,触发作业运行一次。\n- OBS: 选择要监听的OBS路径,如果该路径下有新增文件,则触发调度;新增的文件的路径名,可以通过变量Job.trigger.obsNewFiles引用。前提条件:该OBS路径已经配置DIS消息通知。","enum":["KAFKA","DIS","OBS"],"type":"string"},"fail_policy":{"default":"SUSPEND","description":"执行失败处理策略。\n- SUSPEND: 挂起\n- IGNORE:忽略失败,读取下一事件","enum":["SUSPEND","IGNORE"],"type":"string"},"read_policy":{"default":"LAST","description":"读取策略。\n- LAST: 从上次位置读取\n- 
NEW:从最新位置读取","enum":["LAST","NEW"],"type":"string"}},"required":["event_type","channel"],"type":"object"},"JobInfoRequest":{"description":"作业详情请求体","properties":{"approvers":{"description":"作业审批人","items":{"$ref":"#/definitions/Approver"},"type":"array"},"basic_config":{"$ref":"#/definitions/BasicInfo"},"create_user":{"description":"创建用户","type":"string"},"directory":{"description":"目录路径","type":"string"},"last_update_user":{"description":"作业最后修改人","type":"string"},"log_path":{"description":"日志路径","type":"string"},"name":{"description":"作业名称,只能包含六种字符:英文字母、数字、中文、中划线、下划线和点号。作业名称不能重复。","maxLength":128,"type":"string"},"nodes":{"description":"节点清单","items":{"$ref":"#/definitions/Node"},"type":"array"},"owner":{"description":"责任人","type":"string"},"params":{"description":"作业参数清单","items":{"$ref":"#/definitions/JobParam"},"type":"array"},"priority":{"description":"优先级","type":"string"},"process_type":{"default":"BATCH","description":"作业类型:\n - REAL_TIME: 实时处理\n - BATCH: 批处理","enum":["BATCH","REAL_TIME"],"type":"string"},"schedule":{"$ref":"#/definitions/Schedule"},"single_node_job_flag":{"description":"是否选择单任务,默认为false","type":"boolean"},"single_node_job_type":{"description":"单任务类型","enum":["DliSQL","DwsSQL","HiveSQL","SparkSQL","RdsSQL","DorisSQL","ASSIGNMENT","BRANCH","MERGE","NormalJob","OneClick","MrsFlink","FlinkSQL","FlinkJar","DLISpark"],"type":"string"},"target_status":{"description":"在开启审批开关后,需要填写该字段。表示创建作业的目标状态。\n - SAVED: 保存态,表示作业仅保存,无法调度运行,需要提交并审核通过后才能运行。\n - SUBMITTED: 提交态,表示作业保存后会自动提交,需要审核通过才能运行。\n - PRODUCTION: 生产态,表示作业跳过审批环节,创建后可以直接运行。注意:只有工作空间的管理员用户才能创建生产态的作业。","enum":["SAVED","SUBMITTED","PRODUCTION"],"type":"string"}},"required":["name","nodes","schedule","process_type"],"type":"object"},"JobParam":{"description":"作业参数","properties":{"name":{"description":"参数名称,只能包含英文字母、数字、中划线和下划线。","maxLength":64,"minLength":1,"type":"string"},"type":{"default":"variable","description":"参数类型:\n  - variable: 变量\n  - constants: 
常量","enum":["variable","constants"],"type":"string"},"value":{"description":"参数值","maxLength":1024,"minLength":1,"type":"string"}},"required":["name","value"],"type":"object"},"Location":{"description":"节点在作业画布上的位置","properties":{"x":{"description":"节点在作业画布上的横轴位置","type":"string"},"y":{"description":"节点在作业画布上的纵轴位置","type":"string"}},"required":["x","y"],"type":"object"},"Node":{"description":"节点对象","properties":{"conditions":{"description":"节点执行条件,如果配置此参数,本节点是否执行由condition的字段expression所保存的EL表达式计算结果决定","items":{"$ref":"#/definitions/Condition"},"type":"array"},"cron_trigger":{"$ref":"#/definitions/Cron"},"event_trigger":{"$ref":"#/definitions/Event"},"exec_time_out_retry":{"default":"false","description":"节点是否超时重试","type":"string"},"fail_policy":{"default":"FAIL","description":"作业失败策略:\n- FAIL:终止当前作业执行计划\n- IGNORE:继续执行下一个节点\n- SUSPEND:挂起当前作业执行计划\n- FAIL_CHILD: 终止后续节点执行计划","enum":["FAIL","IGNORE","SUSPEND","FAIL_CHILD"],"type":"string"},"location":{"$ref":"#/definitions/Location"},"max_execution_time":{"default":60,"description":"节点最大执行时间,如果节点在最大执行时间内还未执行完成,会把节点置为失败状态。单位:分钟。","format":"int32","maximum":1440,"minimum":5,"type":"integer"},"name":{"description":"节点名称。只能包含六种字符:英文字母、数字、中文、中划线、下划线和点号。同一个作业中节点名称不能重复。","maxLength":128,"type":"string"},"polling_interval":{"default":10,"description":"轮询节点执行结果时间间隔。单位:秒。","format":"int32","maximum":60,"minimum":1,"type":"integer"},"pre_node_names":{"description":"本节点依赖的前面的节点名称列表。","items":{"description":"节点名称","type":"string"},"type":"array"},"properties":{"description":"节点的属性。","items":{"$ref":"#/definitions/Property"},"type":"array"},"retry_interval":{"default":120,"description":"失败重试时间间隔。单位:秒。","format":"int32","maximum":120,"minimum":5,"type":"integer"},"retry_times":{"default":0,"description":"节点失败重试次数。0代表不重试。","format":"int32","maximum":5,"minimum":0,"type":"integer"},"type":{"description":"节点类型:\n- HiveSQL:执行Hive SQL脚本\n- SparkSQL:执行Spark SQL脚本\n- DWSSQL:执行DWS SQL脚本\n- DLISQL:执行DLI SQL脚本\n- RDSSQL:执行RDS SQL脚本\n- 
Shell:执行Shell脚本\n- Python:执行Python脚本\n- DISTransferTask:创建DIS转储任务\n- CDMJob:执行CDM作业\n- OBSManager:执行OBS相关操作\n- Dummy:虚拟节点\n- RESTAPI:执行Rest API调用\n- DLISpark:执行DLI Spark作业\n- SMN:执行SMN通知\n- MRSSpark:执行MRS Spark作业\n- MapReduce:执行MapReduce作业\n- MRSFlink:执行MRS服务的Flink作业\n- MRSFlinkJob:执行MRS服务的FlinkJob作业\n- MRSHetuEngine: 执行MRS服务的HetuEngine作业","enum":["HiveSQL","SparkSQL","DWSSQL","DLISQL","RDSSQL","Shell","Python","DISTransferTask","CDMJob","OBSManager","Dummy","RESTAPI","DLISpark","SMN","MRSSpark","MapReduce","MRSFlink","MRSFlinkJob","MRSHetuEngine","DataMigration"],"type":"string"}},"required":["name","type","properties","location"],"type":"object"},"Property":{"description":"节点属性。每种节点类型有自己的定义。","properties":{"name":{"description":"属性名称。\n1.SQL类 HiveSQL、SparkSQL、DWSSQL、DLISQL、RDSSQL:\n  取值如下:\n    scriptName,脚本名称\n    database,数据库名称\n    connectionName,连接名称\n    scriptArgs,脚本参数,key、value形式,多个参数间用\\n分隔,例如key1=value1\\nkey2=value2\n2.Host类 Shell Python\n  取值如下:\n    scriptName,脚本名称\n    connectionName,连接名称\n    arguments,脚本参数,key、value形式,多个参数间用\\n分隔,例如key1=value1\\nkey2=value2\n3.CDMJob\n  取值如下:\n    clusterName,集群名称\n    jobName,作业名称\n4.DISTransferTask\n  取值如下:\n    streamName,DIS通道名称\n    destinationType,转储目标\n    duplicatePolicy,转储任务重名处理策略\n    configuration,转储配置\n5.OBSManager\n  取值如下:\n    action,动作类型\n    path,OBS路径\n6.RESTAPI\n  取值如下:\n    url,URL地址\n    method,HTTP方法\n    headers, HTTP消息头,每个消息头的格式为<消息头名称>=<值>,多个消息头之间使用换行符分割。\n    body, 消息体\n7.SMN\n  取值如下:\n    topic,SMN主题URN\n    subject,消息标题,给邮箱订阅者发送邮件时作为邮件主题。\n    messageType, 消息类型\n    message, 发送的消息。\n8.MRSSpark、MapReduce\n  取值如下:\n    clusterName,集群名称\n    jobName,作业名称\n    resourcePath,自定义Jar包OBS资源路径\n    parameters,  Jar包自定义参数;对于开发的自定义Jar包,可以在此处对参数进行输入替换\n    input,输入路径\n    output,输出路径\n    programParameter,运行程序参数;允许多个key:value,多个参数要用竖线隔开。\n9.DLISpark\n  取值如下:\n    clusterName,集群名称\n    jobName,作业名称\n    resourceType,DLI作业运行资源类型\n    sparkConfig,Spark作业运行参数\n    
jobClass,主类名称。当应用程序类型为“.jar”时,主类名称不能为空。\n    jarArgs,主类入口参数\n    resourcePath,JAR包资源路径\n10.MRSFlink\n  取值如下:\n    clusterName,集群名称\n    jobName,作业名称\n    flinkJobType,flink作业类型;Flink SQL作业或Flink JAR作业。\n    flinkJobProcessType,flink作业处理模式;批处理模式或流处理模式。\n    scriptName,脚本名称;Flink SQL关联的SQL脚本。\n    resourcePath,JAR包资源路径\n    input,输入路径\n    output,输出路径\n    programParameter,运行程序参数;允许多个key:value,多个参数要用竖线隔开。\n11.MRS HetuEngine\n  取值如下:\n    clusterName,集群名称\n    jobName,作业名称\n    statementOrScript,脚本类型;SQL语句或关联SQL脚本\n    statement,自定义的SQL内容。\n    scriptName,选择关联的SQL脚本。\n    Data Warehouse,指定HetuEngine服务所需数据连接。\n    Schema,使用HetuEngine服务所要访问的数据源schema名称。\n    Database,使用HetuEngine服务所要访问的数据源database名称。\n    Queue,使用HetuEngine服务所需资源队列名称。","type":"string"},"value":{"description":"属性值","type":"string"}},"type":"object"},"Schedule":{"description":"调度配置","properties":{"cron":{"$ref":"#/definitions/Cron"},"event":{"$ref":"#/definitions/Event"},"type":{"description":"调度类型\n- EXECUTE_ONCE: 作业立即运行,只运行一次。\n- CRON: 作业按指定频率周期执行。\n- EVENT:  根据事件触发执行。","enum":["EXECUTE_ONCE","CRON","EVENT"],"type":"string"}},"required":["type"],"type":"object"}},"description":null,"group_id":"23bb69798aaf409da161a43a90617121","host":"dayu.cn-north-4.myhuaweicloud.com","id":"9db3bec3fa1940f5971a103839371697","info_version":"v1","method":"post","name":"CreateFactoryJob","parameters":{"tenantIdParam":{"description":"项目id","in":"path","name":"project_id","required":true,"type":"string"},"tokenParam":{"description":"IAM Token","in":"header","name":"X-Auth-Token","required":true,"type":"string"},"workspaceParam":{"description":"DataArts 
Studio工作空间ID","in":"header","name":"workspace","required":true,"type":"string"}},"paths":{"/v2/{project_id}/factory/jobs":{"post":{"description":"创建作业","operationId":"CreateFactoryJob","parameters":[{"$ref":"#/parameters/tokenParam"},{"$ref":"#/parameters/workspaceParam"},{"$ref":"#/parameters/tenantIdParam"},{"description":"请求body体","in":"body","name":"CreateFactoryJobRequestBody","required":true,"schema":{"$ref":"#/definitions/JobInfoRequest"}}],"produces":["application/json"],"responses":{"204":{"description":"success"},"400":{"description":"BadRequest","examples":{"application/json":{"error_code":"DLF.0102","error_msg":"The job name already exists."}},"schema":{"$ref":"#/definitions/ErrorResponse"}}},"summary":"创建作业","tags":["作业开发接口"],"x-is-registered":"Y","x-order":1,"x-support-sdk":"Y"}}},"product_short":"DataArtsStudio","region_id":"cn-north-4","schemes":["HTTPS"],"security_definitions":null,"summary":"创建作业","tags":"作业开发接口","uri":null,"version":"2.0"}