From 3cd9ad2406c59cd0ede6c9c8428a4ce4b805f8fa Mon Sep 17 00:00:00 2001
From: Tim Ellison <t.p.ellison@gmail.com>
Date: Wed, 13 May 2015 21:01:42 +0100
Subject: [PATCH] =?UTF-8?q?[MINOR]=20Enhance=20SizeEstimator=20to=20detect?=
 =?UTF-8?q?=20IBM=20compressed=20refs=20and=20s390=20=E2=80=A6?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

…arch.

 - zSeries 64-bit Java reports its architecture as s390x, so enhance the
   64-bit check to accommodate that value.

 - SizeEstimator can detect whether IBM Java is using compressed object
   pointers using info in the "java.vm.info" property, so will do a better
   job than failing on the HotSpot MBean and guessing.

Author: Tim Ellison <t.p.ellison@gmail.com>

Closes #6085 from tellison/SizeEstimator and squashes the following commits:

1b6ff6a [Tim Ellison] Merge branch 'master' of https://github.com/apache/spark into SizeEstimator
0968989 [Tim Ellison] [MINOR] Enhance SizeEstimator to detect IBM compressed refs and s390 arch.
---
 .../scala/org/apache/spark/util/SizeEstimator.scala |  8 +++++++-
 .../org/apache/spark/util/SizeEstimatorSuite.scala  | 12 ++++++++++++
 2 files changed, 19 insertions(+), 1 deletion(-)

diff --git a/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala b/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala
index d91c3294dd..968a72d5ad 100644
--- a/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala
+++ b/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala
@@ -75,7 +75,8 @@ private[spark] object SizeEstimator extends Logging {
   // Sets object size, pointer size based on architecture and CompressedOops settings
   // from the JVM.
   private def initialize() {
-    is64bit = System.getProperty("os.arch").contains("64")
+    val arch = System.getProperty("os.arch")
+    is64bit = arch.contains("64") || arch.contains("s390x")
     isCompressedOops = getIsCompressedOops
 
     objectSize = if (!is64bit) 8 else {
@@ -97,6 +98,11 @@ private[spark] object SizeEstimator extends Logging {
       return System.getProperty("spark.test.useCompressedOops").toBoolean
     }
 
+    // java.vm.info provides compressed ref info for IBM JDKs
+    if (System.getProperty("java.vendor").contains("IBM")) {
+      return System.getProperty("java.vm.info").contains("Compressed Ref")
+    }
+
     try {
       val hotSpotMBeanName = "com.sun.management:type=HotSpotDiagnostic"
       val server = ManagementFactory.getPlatformMBeanServer()
diff --git a/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala b/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
index 133a76f28e..04f0f3749d 100644
--- a/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
@@ -45,6 +45,10 @@ class DummyClass6 extends DummyClass5 {
   val y: Boolean = true
 }
 
+class DummyClass7 {
+  val x: DummyClass1 = new DummyClass1
+}
+
 object DummyString {
   def apply(str: String) : DummyString = new DummyString(str.toArray)
 }
@@ -197,4 +201,12 @@ class SizeEstimatorSuite
     assertResult(24)(SizeEstimator.estimate(new DummyClass5))
     assertResult(32)(SizeEstimator.estimate(new DummyClass6))
   }
+
+  test("check 64-bit detection for s390x arch") {
+    System.setProperty("os.arch", "s390x")
+    val initialize = PrivateMethod[Unit]('initialize)
+    SizeEstimator invokePrivate initialize()
+    // Class should be 32 bytes on s390x if recognised as 64 bit platform
+    assertResult(32)(SizeEstimator.estimate(new DummyClass7))
+  }
 }
--
GitLab
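
For readers who want to try the detection logic described above outside of Spark, the following is a minimal standalone Scala sketch of the checks this patch introduces. It is illustrative only: the object name CompressedRefsProbe is made up, and the non-IBM branch simply reports "unknown" instead of reproducing SizeEstimator's HotSpotDiagnostic MBean query and fallback guess.

// Standalone sketch (hypothetical object name) of the detection added by this patch.
object CompressedRefsProbe {

  // s390x is a 64-bit architecture, but its os.arch value does not contain "64".
  def is64bit: Boolean = {
    val arch = System.getProperty("os.arch")
    arch.contains("64") || arch.contains("s390x")
  }

  // IBM JDKs report compressed-reference mode in the java.vm.info property,
  // e.g. a value containing "Compressed Ref".
  def compressedRefs: Option[Boolean] = {
    if (System.getProperty("java.vendor").contains("IBM")) {
      Some(System.getProperty("java.vm.info").contains("Compressed Ref"))
    } else {
      None // HotSpot MBean query omitted in this sketch
    }
  }

  def main(args: Array[String]): Unit = {
    println(s"64-bit JVM: $is64bit")
    println(s"compressed refs: ${compressedRefs.map(_.toString).getOrElse("unknown (non-IBM JVM)")}")
  }
}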