MapReduce Service (MRS) - Creating a Cluster and Submitting a Job: Request Example


Request Example

Create a custom MRS 3.1.0 cluster with co-deployed management and control roles, and submit a HiveScript job. In this example, a DistCp step first imports the SQL file from OBS, and a HiveScript step then executes it.

POST /v2/{project_id}/run-job-flow

{
  "cluster_version" : "MRS 3.1.0",
  "cluster_name" : "mrs_heshe_dm",
  "cluster_type" : "CUSTOM",
  "charge_info" : {
    "charge_mode" : "postPaid"
  },
  "region" : "",
  "availability_zone" : "",
  "vpc_name" : "vpc-37cd",
  "subnet_id" : "1f8c5ca6-1f66-4096-bb00-baf175954f6e",
  "subnet_name" : "subnet",
  "components" : "Hadoop,Spark2x,HBase,Hive,Hue,Loader,Kafka,Storm,Flume,Flink,Oozie,Ranger,Tez",
  "safe_mode" : "KERBEROS",
  "manager_admin_password" : "your password",
  "login_mode" : "PASSWORD",
  "node_root_password" : "your password",
  "mrs_ecs_default_agency" : "MRS_ECS_DEFAULT_AGENCY",
  "template_id" : "mgmt_control_combined_v2",
  "log_collection" : 1,
  "tags" : [ {
    "key" : "tag1",
    "value" : "111"
  }, {
    "key" : "tag2",
    "value" : "222"
  } ],
  "node_groups" : [ {
    "group_name" : "master_node_default_group",
    "node_num" : 3,
    "node_size" : "Sit3.4xlarge.4.linux.bigdata",
    "root_volume" : {
      "type" : "SAS",
      "size" : 480
    },
    "data_volume" : {
      "type" : "SAS",
      "size" : 600
    },
    "data_volume_count" : 1,
    "assigned_roles" : [ "OMSServer:1,2", "SlapdServer:1,2", "KerberosServer:1,2", "KerberosAdmin:1,2", "quorumpeer:1,2,3", "NameNode:2,3", "Zkfc:2,3", "JournalNode:1,2,3", "ResourceManager:2,3", "JobHistoryServer:2,3", "DBServer:1,3", "Hue:1,3", "LoaderServer:1,3", "MetaStore:1,2,3", "WebHCat:1,2,3", "HiveServer:1,2,3", "HMaster:2,3", "MonitorServer:1,2", "Nimbus:1,2", "UI:1,2", "JDBCServer2x:1,2,3", "JobHistory2x:2,3", "SparkResource2x:1,2,3", "oozie:2,3", "LoadBalancer:2,3", "TezUI:1,3", "TimelineServer:3", "RangerAdmin:1,2", "UserSync:2", "TagSync:2", "KerberosClient", "SlapdClient", "meta", "HSConsole:2,3", "FlinkResource:1,2,3", "DataNode:1,2,3", "NodeManager:1,2,3", "IndexServer2x:1,2", "ThriftServer:1,2,3", "RegionServer:1,2,3", "ThriftServer1:1,2,3", "RESTServer:1,2,3", "Broker:1,2,3", "Supervisor:1,2,3", "Logviewer:1,2,3", "Flume:1,2,3", "HSBroker:1,2,3" ]
  }, {
    "group_name" : "node_group_1",
    "node_num" : 3,
    "node_size" : "Sit3.4xlarge.4.linux.bigdata",
    "root_volume" : {
      "type" : "SAS",
      "size" : 480
    },
    "data_volume" : {
      "type" : "SAS",
      "size" : 600
    },
    "data_volume_count" : 1,
    "assigned_roles" : [ "DataNode", "NodeManager", "RegionServer", "Flume:1", "Broker", "Supervisor", "Logviewer", "HBaseIndexer", "KerberosClient", "SlapdClient", "meta", "HSBroker:1,2", "ThriftServer", "ThriftServer1", "RESTServer", "FlinkResource" ]
  }, {
    "group_name" : "node_group_2",
    "node_num" : 1,
    "node_size" : "Sit3.4xlarge.4.linux.bigdata",
    "root_volume" : {
      "type" : "SAS",
      "size" : 480
    },
    "data_volume" : {
      "type" : "SAS",
      "size" : 600
    },
    "data_volume_count" : 1,
    "assigned_roles" : [ "NodeManager", "KerberosClient", "SlapdClient", "meta", "FlinkResource" ]
  } ],
  "log_uri" : "obs://bucketTest/logs",
  "delete_when_no_steps" : true,
  "steps" : [ {
    "job_execution" : {
      "job_name" : "import_file",
      "job_type" : "DistCp",
      "arguments" : [ "obs://test/test.sql", "/user/hive/input" ]
    }
  }, {
    "job_execution" : {
      "job_name" : "hive_test",
      "job_type" : "HiveScript",
      "arguments" : [ "obs://test/hive/sql/HiveScript.sql" ]
    }
  } ]
}
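The request above can be sent with any HTTP client. Below is a minimal sketch in Python using the requests library, assuming token-based authentication via the X-Auth-Token header; the regional endpoint host, project ID, and token values are placeholders, not values from this example.

# Minimal sketch: send the RunJobFlow request body shown above.
# The endpoint host, project ID, and token below are placeholders.
import json
import requests

ENDPOINT = "https://mrs.example-region.myhuaweicloud.com"  # hypothetical regional endpoint
PROJECT_ID = "{project_id}"                                # replace with your project ID
TOKEN = "your IAM token"                                   # obtained from the IAM token API

request_body = {
    "cluster_version": "MRS 3.1.0",
    "cluster_name": "mrs_heshe_dm",
    # ... remaining fields exactly as in the request example above ...
}

response = requests.post(
    f"{ENDPOINT}/v2/{PROJECT_ID}/run-job-flow",
    headers={"Content-Type": "application/json", "X-Auth-Token": TOKEN},
    data=json.dumps(request_body),
)
print(response.status_code, response.text)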