MapReduce Service (MRS) - Create and Execute a Job: Request Examples

Updated: 2023-11-20 15:34:53

Request Examples

  • Create a MapReduce job (a minimal client-side submission sketch follows the payload)

    POST https://{endpoint}/v2/{project_id}/clusters/{cluster_id}/job-executions
    
    {
      "job_name" : "MapReduceTest",
      "job_type" : "MapReduce",
      "arguments" : [ "obs://obs-test/program/hadoop-mapreduce-examples-x.x.x.jar", "wordcount", "obs://obs-test/input/", "obs://obs-test/job/mapreduce/output" ],
      "properties" : {
        "fs.obs.endpoint" : "obs endpoint",
        "fs.obs.access.key" : "xxx",
        "fs.obs.secret.key" : "yyy"
      }
    }
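
    Sending this request works the same way for every example on this page. Below is a minimal Python sketch, assuming the usual Huawei Cloud token authentication via the X-Auth-Token header; every value is a placeholder to replace with your own.

    import json
    import requests  # third-party HTTP client

    # Placeholders only; substitute your real values.
    endpoint = "{endpoint}"      # MRS API endpoint for your region
    project_id = "{project_id}"
    cluster_id = "{cluster_id}"
    token = "{iam_token}"        # IAM token obtained in advance

    url = ("https://" + endpoint + "/v2/" + project_id +
           "/clusters/" + cluster_id + "/job-executions")

    payload = {
        "job_name": "MapReduceTest",
        "job_type": "MapReduce",
        "arguments": [
            "obs://obs-test/program/hadoop-mapreduce-examples-x.x.x.jar",
            "wordcount",
            "obs://obs-test/input/",
            "obs://obs-test/job/mapreduce/output"
        ],
        "properties": {
            "fs.obs.endpoint": "obs endpoint",
            "fs.obs.access.key": "xxx",
            "fs.obs.secret.key": "yyy"
        }
    }

    response = requests.post(
        url,
        headers={"X-Auth-Token": token, "Content-Type": "application/json"},
        data=json.dumps(payload))
    print(response.status_code, response.text)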
  • Create a SparkSubmit job (see the command-line sketch after the payload)

    POST https://{endpoint}/v2/{project_id}/clusters/{cluster_id}/job-executions
    
    {
      "job_name" : "SparkSubmitTest",
      "job_type" : "SparkSubmit",
      "arguments" : [ "--master", "yarn", "--deploy-mode", "cluster", "--py-files", "obs://obs-test/a.py", "--conf", "spark.yarn.appMasterEnv.PYTHONPATH=/tmp:$PYTHONPATH", "--conf", "spark.yarn.appMasterEnv.aaa=aaaa", "--conf", "spark.executorEnv.aaa=executoraaa", "--properties-file", "obs://obs-test/test-spark.conf", "obs://obs-test/pi.py", "100000" ],
      "properties" : {
        "fs.obs.access.key" : "xxx",
        "fs.obs.secret.key" : "yyy"
      }
    }
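
    The "arguments" array above is simply the spark-submit command line split into tokens. As an illustration only (not part of the API call), the sketch below reassembles the equivalent client-side command:

    import shlex

    arguments = [
        "--master", "yarn", "--deploy-mode", "cluster",
        "--py-files", "obs://obs-test/a.py",
        "--conf", "spark.yarn.appMasterEnv.PYTHONPATH=/tmp:$PYTHONPATH",
        "--conf", "spark.yarn.appMasterEnv.aaa=aaaa",
        "--conf", "spark.executorEnv.aaa=executoraaa",
        "--properties-file", "obs://obs-test/test-spark.conf",
        "obs://obs-test/pi.py", "100000"
    ]
    # shlex.join (Python 3.8+) quotes tokens so the printed command is
    # safe to paste into a shell.
    print("spark-submit " + shlex.join(arguments))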
  • Create a HiveScript job

    POST https://{endpoint}/v2/{project_id}/clusters/{cluster_id}/job-executions
    
    {
      "job_name" : "HiveScriptTest",
      "job_type" : "HiveScript",
      "arguments" : [ "obs://obs-test/sql/test_script.sql" ],
      "properties" : {
        "fs.obs.endpoint" : "obs endpoint",
        "fs.obs.access.key" : "xxx",
        "fs.obs.secret.key" : "yyy"
      }
    }
  • Create a HiveSql job (a sketch of building the escaped SQL argument follows the payload)

    POST https://{endpoint}/v2/{project_id}/clusters/{cluster_id}/job-executions
    
    {
      "job_name" : "HiveSqlTest",
      "job_type" : "HiveSql",
      "arguments" : [ "DROP TABLE IF EXISTS src_wordcount;\ncreate external table src_wordcount(line string) row format delimited fields terminated by \"\\n\" stored as textfile location \"obs://donotdel-gxc/input/\";\ninsert into src_wordcount values(\"v1\")" ],
      "properties" : {
        "fs.obs.endpoint" : "obs endpoint",
        "fs.obs.access.key" : "xxx",
        "fs.obs.secret.key" : "yyy"
      }
    }
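
    The single "arguments" entry above packs several SQL statements into one string: statements are separated by real newlines (serialized as \n), while the Hive field terminator stays a literal backslash-n. A sketch of building that value in Python, letting json.dumps produce the escaping shown above:

    import json

    statements = [
        "DROP TABLE IF EXISTS src_wordcount;",
        'create external table src_wordcount(line string) '
        'row format delimited fields terminated by "\\n" '
        'stored as textfile location "obs://donotdel-gxc/input/";',
        'insert into src_wordcount values("v1")'
    ]
    # "\n".join inserts real newlines between statements; json.dumps then
    # renders them as \n and escapes the embedded double quotes.
    print(json.dumps({"arguments": ["\n".join(statements)]}, indent=2))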
  • Create a DistCp job

    POST https://{endpoint}/v2/{project_id}/clusters/{cluster_id}/job-executions
    
    {
      "job_name" : "DistCpTest",
      "job_type" : "DistCp",
      "arguments" : [ "obs://obs-test/DistcpJob/", "/user/test/sparksql/" ],
      "properties" : {
        "fs.obs.endpoint" : "obs endpoint",
        "fs.obs.access.key" : "xxx",
        "fs.obs.secret.key" : "yyy"
      }
    }
  • Create a SparkScript job

    POST https://{endpoint}/v2/{project_id}/clusters/{cluster_id}/job-executions
    
    {
      "job_type" : "SparkSql",
      "job_name" : "SparkScriptTest",
      "arguments" : [ "op-key1", "op-value1", "op-key2", "op-value2", "obs://obs-test/sql/test_script.sql" ],
      "properties" : {
        "fs.obs.access.key" : "xxx",
        "fs.obs.secret.key" : "yyy"
      }
    }
  • Create a SparkSql job

    POST https://{endpoint}/v2/{project_id}/clusters/{cluster_id}/job-executions
    
    {
      "job_type" : "SparkSql",
      "job_name" : "SparkSqlTest",
      "arguments" : [ "op-key1", "op-value1", "op-key2", "op-value2", "create table student_info3 (id string,name string,gender string,age int,addr string);" ],
      "properties" : {
        "fs.obs.access.key" : "xxx",
        "fs.obs.secret.key" : "yyy"
      }
    }
  • Create a Flink job

    POST https://{endpoint}/v2/{project_id}/clusters/{cluster_id}/job-executions
    
    {
      "job_name" : "flinkTest",
      "job_type" : "Flink",
      "arguments" : [ "run", "-d", "-ynm", "testExcutorejobhdfsbatch", "-m", "yarn-cluster", "hdfs://test/examples/batch/WordCount.jar" ],
      "properties" : {
        "fs.obs.endpoint" : "obs endpoint",
        "fs.obs.access.key" : "xxx",
        "fs.obs.secret.key" : "yyy"
      }
    }
  • Create a SparkPython job (jobs of this type are converted to SparkSubmit for submission: the MRS console displays the job type as SparkSubmit, and when querying the job list through the API, set the job type to SparkSubmit.)

    POST https://{endpoint}/v2/{project_id}/clusters/{cluster_id}/job-executions
    
    {
      "job_type" : "SparkPython",
      "job_name" : "SparkPythonTest",
      "arguments" : [ "--master", "yarn", "--deploy-mode", "cluster", "--py-files", "obs://obs-test/a.py", "--conf", "spark.yarn.appMasterEnv.PYTHONPATH=/tmp:$PYTHONPATH", "--conf", "spark.yarn.appMasterEnv.aaa=aaaa", "--conf", "spark.executorEnv.aaa=executoraaa", "--properties-file", "obs://obs-test/test-spark.conf", "obs://obs-test/pi.py", 100000 ],
      "properties" : {
        "fs.obs.access.key" : "xxx",
        "fs.obs.secret.key" : "yyy"
      }
    }
support.huaweicloud.com/api-mrs/CreateExecuteJob.html