@@ -12,6 +12,7 @@
 // limitations under the License.
 // ----------------------------------------------------------------------------------

+using System.Collections;
 using Microsoft.Azure.Commands.HDInsight.Models;
 using Microsoft.Azure.Management.HDInsight;
 using Microsoft.WindowsAzure.Commands.ScenarioTest;
@@ -46,37 +47,14 @@ public void CanCreateNewConfigForRServer()
         [Trait(Category.AcceptanceType, Category.CheckIn)]
         public void CanAddSparkCustomConfigs()
         {
-            AzureHDInsightConfig config = new AzureHDInsightConfig();
-
-            var addConfigValuesCmdlet = new AddAzureHDInsightConfigValuesCommand
-            {
-                CommandRuntime = commandRuntimeMock.Object,
-                HDInsightManagementClient = hdinsightManagementMock.Object,
-                Config = config,
-                SparkDefaults = new System.Collections.Hashtable() { { @"spark.executor.instances", "3" } },
-                SparkThriftConf = new System.Collections.Hashtable() { { @"spark.executor.cores", "4" } },
-                Spark2Defaults = new System.Collections.Hashtable() { { @"spark.executor.memory", "2048m" } },
-                Spark2ThriftConf = new System.Collections.Hashtable() { { @"spark.driver.memory.overhead", "1024" } },
-            };
-
-            addConfigValuesCmdlet.ExecuteCmdlet();
+            CustomizeSpark(1);
+        }

-            commandRuntimeMock.Verify(
-                f =>
-                    f.WriteObject(
-                        It.Is<AzureHDInsightConfig>(
-                            c =>
-                                c.Configurations != null &&
-                                c.Configurations.ContainsKey(ConfigurationKey.SparkDefaults) &&
-                                c.Configurations[ConfigurationKey.SparkDefaults]["spark.executor.instances"].Equals("3") &&
-                                c.Configurations.ContainsKey(ConfigurationKey.SparkThriftConf) &&
-                                c.Configurations[ConfigurationKey.SparkThriftConf]["spark.executor.cores"].Equals("4") &&
-                                c.Configurations.ContainsKey(ConfigurationKey.Spark2Defaults) &&
-                                c.Configurations[ConfigurationKey.Spark2Defaults]["spark.executor.memory"].Equals("2048m") &&
-                                c.Configurations.ContainsKey(ConfigurationKey.Spark2ThriftConf) &&
-                                c.Configurations[ConfigurationKey.Spark2ThriftConf]["spark.driver.memory.overhead"].Equals("1024") &&
-                                c.ScriptActions.Count == 0)),
-                Times.Once);
+        [Fact]
+        [Trait(Category.AcceptanceType, Category.CheckIn)]
+        public void CanAddSpark2CustomConfigs()
+        {
+            CustomizeSpark(2);
         }

         public void CreateNewConfig(bool setEdgeNodeVmSize = false)
@@ -110,5 +88,49 @@ public void CreateNewConfig(bool setEdgeNodeVmSize = false)
                                 c.ScriptActions.Count == 0)),
                 Times.Once);
         }
+
+        private void CustomizeSpark(int sparkVersion)
+        {
+            AzureHDInsightConfig config = new AzureHDInsightConfig();
+
+            Hashtable sparkDefaults = new Hashtable() { { @"spark.executor.instances", "3" } };
+            Hashtable sparkThriftConf = new Hashtable() { { @"spark.executor.cores", "4" } };
+
+            AddAzureHDInsightConfigValuesCommand addConfigValuesCmdlet = new AddAzureHDInsightConfigValuesCommand
+            {
+                CommandRuntime = commandRuntimeMock.Object,
+                HDInsightManagementClient = hdinsightManagementMock.Object,
+                Config = config,
+            };
+
+            if (sparkVersion == 1)
+            {
+                addConfigValuesCmdlet.SparkDefaults = sparkDefaults;
+                addConfigValuesCmdlet.SparkThriftConf = sparkThriftConf;
+            }
+            else
+            {
+                addConfigValuesCmdlet.Spark2Defaults = sparkDefaults;
+                addConfigValuesCmdlet.Spark2ThriftConf = sparkThriftConf;
+            }
+
+            addConfigValuesCmdlet.ExecuteCmdlet();
+
+            commandRuntimeMock.Verify(
+                f =>
+                    f.WriteObject(
+                        It.Is<AzureHDInsightConfig>(
+                            c =>
+                                c.Configurations != null &&
+                                ((sparkVersion == 1 && c.Configurations.ContainsKey(ConfigurationKey.SparkDefaults) &&
+                                  c.Configurations[ConfigurationKey.SparkDefaults]["spark.executor.instances"].Equals(sparkDefaults["spark.executor.instances"]) &&
+                                  c.Configurations.ContainsKey(ConfigurationKey.SparkThriftConf) &&
+                                  c.Configurations[ConfigurationKey.SparkThriftConf]["spark.executor.cores"].Equals(sparkThriftConf["spark.executor.cores"])) ||
+                                 (sparkVersion == 2 && c.Configurations.ContainsKey(ConfigurationKey.Spark2Defaults) &&
+                                  c.Configurations[ConfigurationKey.Spark2Defaults]["spark.executor.instances"].Equals(sparkDefaults["spark.executor.instances"]) &&
+                                  c.Configurations.ContainsKey(ConfigurationKey.Spark2ThriftConf) &&
+                                  c.Configurations[ConfigurationKey.Spark2ThriftConf]["spark.executor.cores"].Equals(sparkThriftConf["spark.executor.cores"]))))),
+                Times.Once);
+        }
     }
 }
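For comparison, the same per-version coverage could also be written as one parameterized xUnit test instead of two [Fact] methods. This is only a sketch, assuming it sits in the same test class as the CustomizeSpark(int) helper and the Category trait constants from the diff above; the method name CanAddSparkCustomConfigsForGivenVersion is hypothetical, and [Theory]/[InlineData] are standard xUnit attributes rather than part of this change.

        // Sketch only: a parameterized alternative to the two [Fact] tests above.
        // Assumes the surrounding test class and the CustomizeSpark(int) helper
        // introduced by this change; the test name here is hypothetical.
        [Theory]
        [InlineData(1)] // Spark 1.x path: SparkDefaults / SparkThriftConf
        [InlineData(2)] // Spark 2.x path: Spark2Defaults / Spark2ThriftConf
        [Trait(Category.AcceptanceType, Category.CheckIn)]
        public void CanAddSparkCustomConfigsForGivenVersion(int sparkVersion)
        {
            CustomizeSpark(sparkVersion);
        }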