
Building Hive 3.1.2 with Spark 3.0.0 Support


1. Download the Source

wget https://downloads.apache.org/hive/hive-3.1.2/apache-hive-3.1.2-src.tar.gz
md5sum apache-hive-3.1.2-src.tar.gz
d25e11c52ced2eba8eb419548cd38573  apache-hive-3.1.2-src.tar.gz
tar zxvf apache-hive-3.1.2-src.tar.gz
#or
git clone https://github.com/apache/hive.git
git checkout rel/release-3.1.2
#or
wget https://codeload.github.com/apache/hive/tar.gz/rel/release-3.1.2

2. Update the guava Dependency

#pom.xml
# Keep guava in line with the 27.0 that ships with Hadoop 3.1.3:
# ls -la /opt/module/hadoop-3.1.3/share/hadoop/common/lib | grep guava
# guava-27.0-jre.jar
# line 147, before:
<guava.version>19.0</guava.version>
# after:
<guava.version>27.0-jre</guava.version>

3. Build Errors

yum -y install  gcc gcc-c++ zlib-devel autoconf automake libtool
mvn clean package -DskipTests -Pdist
###################################################################################################################
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-compiler-plugin:3.6.1:compile (default-compile) on project hive-llap-common: Compilation failure: Compilation failure: 
[ERROR] /home/forsre/hive/apache-hive-3.1.2-src/llap-common/src/java/org/apache/hadoop/hive/llap/AsyncPbRpcProxy.java:[173,16] method addCallback in class com.google.common.util.concurrent.Futures cannot be applied to given types;
[ERROR]   required: com.google.common.util.concurrent.ListenableFuture<V>,com.google.common.util.concurrent.FutureCallback<? super V>,java.util.concurrent.Executor
[ERROR]   found: com.google.common.util.concurrent.ListenableFuture<U>,org.apache.hadoop.hive.llap.AsyncPbRpcProxy.ResponseCallback<U>
[ERROR]   reason: cannot infer type-variable(s) V
[ERROR]     (actual and formal argument lists differ in length)
[ERROR] /home/forsre/hive/apache-hive-3.1.2-src/llap-common/src/java/org/apache/hadoop/hive/llap/AsyncPbRpcProxy.java:[274,12] method addCallback in class com.google.common.util.concurrent.Futures cannot be applied to given types;
[ERROR]   required: com.google.common.util.concurrent.ListenableFuture<V>,com.google.common.util.concurrent.FutureCallback<? super V>,java.util.concurrent.Executor
[ERROR]   found: com.google.common.util.concurrent.ListenableFuture<java.lang.Void>,<anonymous com.google.common.util.concurrent.FutureCallback<java.lang.Void>>
[ERROR]   reason: cannot infer type-variable(s) V
[ERROR]     (actual and formal argument lists differ in length)
[ERROR] -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <args> -rf :hive-llap-common
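
The root cause of all these errors is an API break between Guava 19 and 27: the two-argument Futures.addCallback(future, callback) overload was removed, so every call site must now pass an Executor explicitly. Below is a minimal, self-contained sketch (illustrative only, not Hive code) of the repaired call shape; MoreExecutors.directExecutor() reproduces the old implicit behavior of running the callback on whichever thread completes the future.

import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.SettableFuture;

public class AddCallbackDemo {
  public static void main(String[] args) {
    SettableFuture<String> future = SettableFuture.create();
    FutureCallback<String> callback = new FutureCallback<String>() {
      @Override public void onSuccess(String result) { System.out.println("ok: " + result); }
      @Override public void onFailure(Throwable t) { t.printStackTrace(); }
    };
    // Guava 19 accepted: Futures.addCallback(future, callback);
    // Guava 27 requires the third argument:
    Futures.addCallback(future, callback, MoreExecutors.directExecutor());
    future.set("done"); // completes the future and fires onSuccess
  }
}

This is why sections 4.1 through 4.7 below all make the same mechanical change.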

4. Fix the Errors (8 Places)

References: REF1, REF2. Backup: BACKUP

4.1 llap-common/src/java/org/apache/hadoop/hive/llap/AsyncPbRpcProxy.java

#llap-common/src/java/org/apache/hadoop/hive/llap/AsyncPbRpcProxy.java
# line 174, before:
            request.getCallback(), nodeId, this));
# after:
            request.getCallback(), nodeId, this), MoreExecutors.directExecutor());
# line 286, before:
    });
# after:
    }, MoreExecutors.directExecutor());

4.2 llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/LlapTaskSchedulerService.java

#llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/LlapTaskSchedulerService.java
# line 747, before:
      Futures.addCallback(nodeEnablerFuture, new LoggingFutureCallback("NodeEnablerThread", LOG));
# after:
      Futures.addCallback(nodeEnablerFuture, new LoggingFutureCallback("NodeEnablerThread", LOG), MoreExecutors.directExecutor());

# line 752, before:
          new LoggingFutureCallback("DelayedTaskSchedulerThread", LOG));
# after:
          new LoggingFutureCallback("DelayedTaskSchedulerThread", LOG), MoreExecutors.directExecutor());

# line 755, before:
      Futures.addCallback(schedulerFuture, new LoggingFutureCallback("SchedulerThread", LOG));
# after:
      Futures.addCallback(schedulerFuture, new LoggingFutureCallback("SchedulerThread", LOG), MoreExecutors.directExecutor());

4.3 ql/src/java/org/apache/hadoop/hive/ql/exec/tez/WorkloadManager.java

# ql/src/java/org/apache/hadoop/hive/ql/exec/tez/WorkloadManager.java

# line 22, add:
import com.google.common.util.concurrent.MoreExecutors;

# line 1095, before:
    Futures.addCallback(future, FATAL_ERROR_CALLBACK);
# after:
    Futures.addCallback(future, FATAL_ERROR_CALLBACK, MoreExecutors.directExecutor());
# line 1928, before:
      Futures.addCallback(getFuture, this);
# after:
      Futures.addCallback(getFuture, this, MoreExecutors.directExecutor());
# line 1982, before:
        Futures.addCallback(waitFuture, this);
# after:
        Futures.addCallback(waitFuture, this, MoreExecutors.directExecutor());

4.4 ql/src/test/org/apache/hadoop/hive/ql/exec/tez/SampleTezSessionState.java

# ql/src/test/org/apache/hadoop/hive/ql/exec/tez/SampleTezSessionState.java

# line 21, add:
import com.google.common.util.concurrent.MoreExecutors;

# line 131, before:
    });
# after:
    }, MoreExecutors.directExecutor());

4.5 llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskExecutorService.java

# llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskExecutorService.java

# line 178, before:
    Futures.addCallback(future, new WaitQueueWorkerCallback());
# after:
    Futures.addCallback(future, new WaitQueueWorkerCallback(), MoreExecutors.directExecutor());

4.6 llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapTaskReporter.java

# llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapTaskReporter.java

# line 131, before:
    Futures.addCallback(future, new HeartbeatCallback(errorReporter));
# after:
    Futures.addCallback(future, new HeartbeatCallback(errorReporter), MoreExecutors.directExecutor());

4.7 llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/AMReporter.java

# llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/AMReporter.java

# line 177, before:
    });
# after:
    }, MoreExecutors.directExecutor());
# line 277, before:
    });
# after:
    }, MoreExecutors.directExecutor());
# line 345, before:
              });
# after:
              }, MoreExecutors.directExecutor());

4.8 druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidScanQueryRecordReader.java

# druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidScanQueryRecordReader.java

# line 28, replace:
import com.google.common.collect.Iterators;
# with:
import java.util.Collections;

# line 46, before:
  private Iterator<List<Object>> compactedValues = Iterators.emptyIterator();
# after:
  private Iterator<List<Object>> compactedValues = Collections.emptyIterator();
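
For reference, Guava deprecated Iterators.emptyIterator() in favor of the JDK's Collections.emptyIterator(), which has existed since Java 7, so this is a drop-in replacement. A trivial sketch:

import java.util.Collections;
import java.util.Iterator;
import java.util.List;

public class EmptyIteratorDemo {
  public static void main(String[] args) {
    // Collections.emptyIterator() is the JDK equivalent of the Guava helper:
    Iterator<List<Object>> empty = Collections.emptyIterator();
    System.out.println(empty.hasNext()); // prints false
  }
}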

5. Build Succeeds

mvn clean package -DskipTests -Pdist

6. Update the Spark Dependency

#pom.xml
# lines 201-203, before:
    <spark.version>2.3.0</spark.version>
    <scala.binary.version>2.11</scala.binary.version>
    <scala.version>2.11.8</scala.version>
# after:
    <spark.version>3.0.0</spark.version>
    <scala.binary.version>2.12</scala.binary.version>
    <scala.version>2.12.10</scala.version>

7. Build Errors

[ERROR] Failed to execute goal org.apache.maven.plugins:maven-compiler-plugin:3.6.1:compile (default-compile) on project hive-spark-client: Compilation failure: Compilation failure: 
[ERROR] /home/forsre/hive/apache-hive-3.1.2-src/spark-client/src/main/java/org/apache/hive/spark/counter/SparkCounter.java:[22,24] cannot find symbol
[ERROR]   symbol:   class Accumulator
[ERROR]   location: package org.apache.spark
[ERROR] /home/forsre/hive/apache-hive-3.1.2-src/spark-client/src/main/java/org/apache/hive/spark/counter/SparkCounter.java:[23,24] cannot find symbol
[ERROR]   symbol:   class AccumulatorParam
[ERROR]   location: package org.apache.spark
[ERROR] /home/forsre/hive/apache-hive-3.1.2-src/spark-client/src/main/java/org/apache/hive/spark/counter/SparkCounter.java:[30,11] cannot find symbol
[ERROR]   symbol:   class Accumulator
[ERROR]   location: class org.apache.hive.spark.counter.SparkCounter
[ERROR] /home/forsre/hive/apache-hive-3.1.2-src/spark-client/src/main/java/org/apache/hive/spark/counter/SparkCounter.java:[91,41] cannot find symbol
[ERROR]   symbol:   class AccumulatorParam
[ERROR]   location: class org.apache.hive.spark.counter.SparkCounter
[ERROR] -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <args> -rf :hive-spark-client

8. Fix the Errors

8.1 spark-client/src/main/java/org/apache/hive/spark/counter/SparkCounter.java

# spark-client/src/main/java/org/apache/hive/spark/counter/SparkCounter.java

# lines 22-23, remove:
import org.apache.spark.Accumulator;
import org.apache.spark.AccumulatorParam;
# and add in their place:
import org.apache.spark.util.LongAccumulator;

# line 30, before:
  private Accumulator<Long> accumulator;
# after:
  private LongAccumulator accumulator;

# line 58, remove:
  LongAccumulatorParam longParam = new LongAccumulatorParam();

# line 60, before:
    this.accumulator = sparkContext.accumulator(initValue, accumulatorName, longParam);
# after:
    this.accumulator = sparkContext.sc().longAccumulator(accumulatorName);

# lines 91-107, remove the whole inner class:
class LongAccumulatorParam implements AccumulatorParam<Long> {

    @Override
    public Long addAccumulator(Long t1, Long t2) {
      return t1 + t2;
    }

    @Override
    public Long addInPlace(Long r1, Long r2) {
      return r1 + r2;
    }

    @Override
    public Long zero(Long initialValue) {
      return 0L;
    }
  }
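
Accumulator and AccumulatorParam were deprecated in Spark 2.0 and removed in Spark 3.0; AccumulatorV2 and the built-in LongAccumulator replace them, which is why the hand-rolled LongAccumulatorParam class above can simply be deleted. A standalone sketch (illustrative, not Hive code) of the replacement API:

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.util.LongAccumulator;

public class AccumulatorDemo {
  public static void main(String[] args) {
    JavaSparkContext jsc = new JavaSparkContext(
        new SparkConf().setMaster("local[1]").setAppName("acc-demo"));
    // longAccumulator() already knows how to add and zero longs,
    // so no AccumulatorParam implementation is needed:
    LongAccumulator counter = jsc.sc().longAccumulator("counter");
    counter.add(5L);
    System.out.println(counter.value()); // prints 5
    jsc.stop();
  }
}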

8.2 spark-client/src/main/java/org/apache/hive/spark/client/metrics/ShuffleWriteMetrics.java

# spark-client/src/main/java/org/apache/hive/spark/client/metrics/ShuffleWriteMetrics.java

# lines 50-51, before:
    this(metrics.shuffleWriteMetrics().shuffleBytesWritten(),
      metrics.shuffleWriteMetrics().shuffleWriteTime());
# after:
    this(metrics.shuffleWriteMetrics().bytesWritten(),
      metrics.shuffleWriteMetrics().writeTime());
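
Note the getter mapping here: shuffleBytesWritten() became bytesWritten() and shuffleWriteTime() became writeTime(); the two-argument constructor's second parameter is the write time, so writeTime() is the getter that matches it. A compile-only sketch of the mapping (illustrative, not Hive code; assumes a TaskMetrics instance is supplied by the caller):

import org.apache.spark.executor.TaskMetrics;

// Old Spark 1.x getter names in the comments, their Spark 2+/3 replacements in the calls.
public class ShuffleWriteMetricsMapping {
  static long bytesWritten(TaskMetrics metrics) {
    // was: metrics.shuffleWriteMetrics().shuffleBytesWritten()
    return metrics.shuffleWriteMetrics().bytesWritten();
  }

  static long writeTimeNanos(TaskMetrics metrics) {
    // was: metrics.shuffleWriteMetrics().shuffleWriteTime(); still reported in nanoseconds
    return metrics.shuffleWriteMetrics().writeTime();
  }
}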

8.3 ql/src/test/org/apache/hadoop/hive/ql/stats/TestStatsUtils.java

# ql/src/test/org/apache/hadoop/hive/ql/stats/TestStatsUtils.java

# line 34, before:
import org.spark_project.guava.collect.Sets;
# after (Spark 3 renamed its shaded guava package to org.sparkproject):
import org.sparkproject.guava.collect.Sets;

9. Build Succeeds

mvn clean package -DskipTests -Pdist

10. HIVE-19316 Fix

References: ref1, ref2

10.1 standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/ColumnsStatsUtils.java

# Create a new Java class:
# standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/ColumnsStatsUtils.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hive.metastore.columnstats;

import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
import org.apache.hadoop.hive.metastore.columnstats.cache.DateColumnStatsDataInspector;
import org.apache.hadoop.hive.metastore.columnstats.cache.DecimalColumnStatsDataInspector;
import org.apache.hadoop.hive.metastore.columnstats.cache.DoubleColumnStatsDataInspector;
import org.apache.hadoop.hive.metastore.columnstats.cache.LongColumnStatsDataInspector;
import org.apache.hadoop.hive.metastore.columnstats.cache.StringColumnStatsDataInspector;

/**
 * Utils class for columnstats package.
 */
public final class ColumnsStatsUtils {

    private ColumnsStatsUtils(){}

    /**
     * Converts to DateColumnStatsDataInspector if it's a DateColumnStatsData.
     * @param cso ColumnStatisticsObj
     * @return DateColumnStatsDataInspector
     */
    public static DateColumnStatsDataInspector dateInspectorFromStats(ColumnStatisticsObj cso) {
        DateColumnStatsDataInspector dateColumnStats;
        if (cso.getStatsData().getDateStats() instanceof DateColumnStatsDataInspector) {
            dateColumnStats =
                    (DateColumnStatsDataInspector)(cso.getStatsData().getDateStats());
        } else {
            dateColumnStats = new DateColumnStatsDataInspector(cso.getStatsData().getDateStats());
        }
        return dateColumnStats;
    }

    /**
     * Converts to StringColumnStatsDataInspector
     * if it's a StringColumnStatsData.
     * @param cso ColumnStatisticsObj
     * @return StringColumnStatsDataInspector
     */
    public static StringColumnStatsDataInspector stringInspectorFromStats(ColumnStatisticsObj cso) {
        StringColumnStatsDataInspector columnStats;
        if (cso.getStatsData().getStringStats() instanceof StringColumnStatsDataInspector) {
            columnStats =
                    (StringColumnStatsDataInspector)(cso.getStatsData().getStringStats());
        } else {
            columnStats = new StringColumnStatsDataInspector(cso.getStatsData().getStringStats());
        }
        return columnStats;
    }

    /**
     * Converts to LongColumnStatsDataInspector if it's a LongColumnStatsData.
     * @param cso ColumnStatisticsObj
     * @return LongColumnStatsDataInspector
     */
    public static LongColumnStatsDataInspector longInspectorFromStats(ColumnStatisticsObj cso) {
        LongColumnStatsDataInspector columnStats;
        if (cso.getStatsData().getLongStats() instanceof LongColumnStatsDataInspector) {
            columnStats =
                    (LongColumnStatsDataInspector)(cso.getStatsData().getLongStats());
        } else {
            columnStats = new LongColumnStatsDataInspector(cso.getStatsData().getLongStats());
        }
        return columnStats;
    }

    /**
     * Converts to DoubleColumnStatsDataInspector
     * if it's a DoubleColumnStatsData.
     * @param cso ColumnStatisticsObj
     * @return DoubleColumnStatsDataInspector
     */
    public static DoubleColumnStatsDataInspector doubleInspectorFromStats(ColumnStatisticsObj cso) {
        DoubleColumnStatsDataInspector columnStats;
        if (cso.getStatsData().getDoubleStats() instanceof DoubleColumnStatsDataInspector) {
            columnStats =
                    (DoubleColumnStatsDataInspector)(cso.getStatsData().getDoubleStats());
        } else {
            columnStats = new DoubleColumnStatsDataInspector(cso.getStatsData().getDoubleStats());
        }
        return columnStats;
    }

    /**
     * Converts to DecimalColumnStatsDataInspector
     * if it's a DecimalColumnStatsData.
     * @param cso ColumnStatisticsObj
     * @return DecimalColumnStatsDataInspector
     */
    public static DecimalColumnStatsDataInspector decimalInspectorFromStats(ColumnStatisticsObj cso) {
        DecimalColumnStatsDataInspector columnStats;
        if (cso.getStatsData().getDecimalStats() instanceof DecimalColumnStatsDataInspector) {
            columnStats =
                    (DecimalColumnStatsDataInspector)(cso.getStatsData().getDecimalStats());
        } else {
            columnStats = new DecimalColumnStatsDataInspector(cso.getStatsData().getDecimalStats());
        }
        return columnStats;
    }
}
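
A short usage sketch of the helper class (hypothetical values; assumes the thrift-generated constructors that take the required fields, and the copy constructors added in 10.7-10.11 below): whichever branch of the instanceof check runs, the caller always gets an inspector back instead of relying on a blind cast.

import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
import org.apache.hadoop.hive.metastore.api.DateColumnStatsData;
import org.apache.hadoop.hive.metastore.columnstats.ColumnsStatsUtils;
import org.apache.hadoop.hive.metastore.columnstats.cache.DateColumnStatsDataInspector;

public class ColumnsStatsUtilsDemo {
  public static void main(String[] args) {
    // Plain thrift stats object (numNulls = 0, numDVs = 10), not an inspector:
    ColumnStatisticsData data = new ColumnStatisticsData();
    data.setDateStats(new DateColumnStatsData(0L, 10L));
    ColumnStatisticsObj cso = new ColumnStatisticsObj("d", "date", data);

    // Wraps the plain data in an inspector instead of casting it:
    DateColumnStatsDataInspector inspector = ColumnsStatsUtils.dateInspectorFromStats(cso);
    System.out.println(inspector.getNumDVs()); // prints 10
  }
}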

10.2 standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/aggr/DateColumnStatsAggregator.java

# standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/aggr/DateColumnStatsAggregator.java

# line 40, add:
import static org.apache.hadoop.hive.metastore.columnstats.ColumnsStatsUtils.dateInspectorFromStats;

# line 66, before:
          (DateColumnStatsDataInspector) cso.getStatsData().getDateStats();
# after:
          dateInspectorFromStats(cso);
# line 99, before:
            (DateColumnStatsDataInspector) cso.getStatsData().getDateStats();
# after:
            dateInspectorFromStats(cso);
# line 178, before:
              (DateColumnStatsDataInspector) cso.getStatsData().getDateStats();
# after:
              dateInspectorFromStats(cso);

10.3 standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/aggr/DecimalColumnStatsAggregator.java

# standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/aggr/DecimalColumnStatsAggregator.java

# line 42, add:
import static org.apache.hadoop.hive.metastore.columnstats.ColumnsStatsUtils.decimalInspectorFromStats;

# line 69, before:
          (DecimalColumnStatsDataInspector) cso.getStatsData().getDecimalStats();
# after:
          decimalInspectorFromStats(cso);
# line 102, before:
            (DecimalColumnStatsDataInspector) cso.getStatsData().getDecimalStats();
# after:
            decimalInspectorFromStats(cso);
# line 191, before:
              (DecimalColumnStatsDataInspector) cso.getStatsData().getDecimalStats();
# after:
              decimalInspectorFromStats(cso);

10.4 standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/aggr/DoubleColumnStatsAggregator.java

# standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/aggr/DoubleColumnStatsAggregator.java

# line 39, add:
import static org.apache.hadoop.hive.metastore.columnstats.ColumnsStatsUtils.doubleInspectorFromStats;

# line 66, before:
          (DoubleColumnStatsDataInspector) cso.getStatsData().getDoubleStats();
# after:
          doubleInspectorFromStats(cso);
# line 99, before:
            (DoubleColumnStatsDataInspector) cso.getStatsData().getDoubleStats();
# after:
            doubleInspectorFromStats(cso);
# line 176, before:
              (DoubleColumnStatsDataInspector) cso.getStatsData().getDoubleStats();
# after:
              doubleInspectorFromStats(cso);

10.5 standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/aggr/LongColumnStatsAggregator.java

# standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/aggr/LongColumnStatsAggregator.java

# line 40, add:
import static org.apache.hadoop.hive.metastore.columnstats.ColumnsStatsUtils.longInspectorFromStats;

# line 67, before:
          (LongColumnStatsDataInspector) cso.getStatsData().getLongStats();
# after:
          longInspectorFromStats(cso);
# line 100, before:
            (LongColumnStatsDataInspector) cso.getStatsData().getLongStats();
# after:
            longInspectorFromStats(cso);
# line 178, before:
              (LongColumnStatsDataInspector) cso.getStatsData().getLongStats();
# after:
              longInspectorFromStats(cso);

10.6 standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/aggr/StringColumnStatsAggregator.java

# standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/aggr/StringColumnStatsAggregator.java

# line 40, add:
import static org.apache.hadoop.hive.metastore.columnstats.ColumnsStatsUtils.stringInspectorFromStats;

# line 67, before:
          (StringColumnStatsDataInspector) cso.getStatsData().getStringStats();
# after:
          stringInspectorFromStats(cso);
# line 97, before:
            (StringColumnStatsDataInspector) cso.getStatsData().getStringStats();
# after:
            stringInspectorFromStats(cso);
# line 152, before:
              (StringColumnStatsDataInspector) cso.getStatsData().getStringStats();
# after:
              stringInspectorFromStats(cso);

10.7 standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/cache/DateColumnStatsDataInspector.java

# standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/cache/DateColumnStatsDataInspector.java

# line 46, add:
  public DateColumnStatsDataInspector(DateColumnStatsData other) {
    super(other);
  }

10.8 standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/cache/DecimalColumnStatsDataInspector.java

# standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/cache/DecimalColumnStatsDataInspector.java

# line 46, add:
  public DecimalColumnStatsDataInspector(DecimalColumnStatsData other) {
    super(other);
  }

10.9 standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/cache/DoubleColumnStatsDataInspector.java

# standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/cache/DoubleColumnStatsDataInspector.java

# line 46, add:
  public DoubleColumnStatsDataInspector(DoubleColumnStatsData other) {
    super(other);
  }

10.10 standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/cache/LongColumnStatsDataInspector.java

# standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/cache/LongColumnStatsDataInspector.java

# line 46, add:
  public LongColumnStatsDataInspector(LongColumnStatsData other) {
    super(other);
  }

10.11 standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/cache/StringColumnStatsDataInspector.java

# standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/cache/StringColumnStatsDataInspector.java

# line 46, add:
  public StringColumnStatsDataInspector(StringColumnStatsData other) {
    super(other);
  }

10.12 standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/merge/DateColumnStatsMerger.java

# standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/merge/DateColumnStatsMerger.java

# line 26, add:
import static org.apache.hadoop.hive.metastore.columnstats.ColumnsStatsUtils.dateInspectorFromStats;

# lines 30-33, before:
    DateColumnStatsDataInspector aggregateData =
        (DateColumnStatsDataInspector) aggregateColStats.getStatsData().getDateStats();
    DateColumnStatsDataInspector newData =
        (DateColumnStatsDataInspector) newColStats.getStatsData().getDateStats();
# after:
    DateColumnStatsDataInspector aggregateData = dateInspectorFromStats(aggregateColStats);
    DateColumnStatsDataInspector newData = dateInspectorFromStats(newColStats);

10.13 standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/merge/DecimalColumnStatsMerger.java

# standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/merge/DecimalColumnStatsMerger.java

# line 26, add:
import static org.apache.hadoop.hive.metastore.columnstats.ColumnsStatsUtils.decimalInspectorFromStats;

# line 31, before:
        (DecimalColumnStatsDataInspector) aggregateColStats.getStatsData().getDecimalStats();
# after:
            decimalInspectorFromStats(aggregateColStats);
# line 33, before:
        (DecimalColumnStatsDataInspector) newColStats.getStatsData().getDecimalStats();
# after:
            decimalInspectorFromStats(newColStats);

10.14 standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/merge/DoubleColumnStatsMerger.java

# standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/merge/DoubleColumnStatsMerger.java

# line 25, add:
import static org.apache.hadoop.hive.metastore.columnstats.ColumnsStatsUtils.doubleInspectorFromStats;

# lines 29-32, before:
    DoubleColumnStatsDataInspector aggregateData =
        (DoubleColumnStatsDataInspector) aggregateColStats.getStatsData().getDoubleStats();
    DoubleColumnStatsDataInspector newData =
        (DoubleColumnStatsDataInspector) newColStats.getStatsData().getDoubleStats();
# after:
    DoubleColumnStatsDataInspector aggregateData = doubleInspectorFromStats(aggregateColStats);
    DoubleColumnStatsDataInspector newData = doubleInspectorFromStats(newColStats);

10.15 standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/merge/LongColumnStatsMerger.java

# standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/merge/LongColumnStatsMerger.java

# line 25, add:
import static org.apache.hadoop.hive.metastore.columnstats.ColumnsStatsUtils.longInspectorFromStats;

# lines 29-32, before:
    LongColumnStatsDataInspector aggregateData =
        (LongColumnStatsDataInspector) aggregateColStats.getStatsData().getLongStats();
    LongColumnStatsDataInspector newData =
        (LongColumnStatsDataInspector) newColStats.getStatsData().getLongStats();
# after:
    LongColumnStatsDataInspector aggregateData = longInspectorFromStats(aggregateColStats);
    LongColumnStatsDataInspector newData = longInspectorFromStats(newColStats);

10.16 standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/merge/StringColumnStatsMerger.java

# standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/merge/StringColumnStatsMerger.java

# line 25, add:
import static org.apache.hadoop.hive.metastore.columnstats.ColumnsStatsUtils.stringInspectorFromStats;

# lines 29-32, before:
    StringColumnStatsDataInspector aggregateData =
        (StringColumnStatsDataInspector) aggregateColStats.getStatsData().getStringStats();
    StringColumnStatsDataInspector newData =
        (StringColumnStatsDataInspector) newColStats.getStatsData().getStringStats();
# after:
    StringColumnStatsDataInspector aggregateData = stringInspectorFromStats(aggregateColStats);
    StringColumnStatsDataInspector newData = stringInspectorFromStats(newColStats);

11. Build Succeeds

mvn clean package -DskipTests -Pdist
# Alternatives:
# mvn clean package -Dmaven.test.skip=true -Dmaven.javadoc.skip=true
# mvn clean package -Pdist -DskipTests -Dmaven.javadoc.skip=true
# https://cwiki.apache.org/confluence/display/hive/hivedeveloperfaq#HiveDeveloperFAQ-Howtobuildallsource?
# How to generate the tarball? It will be located in the packaging/target/ directory.
# -DskipTests: skip running tests, but still compile test classes into target/test-classes.
# -Dmaven.test.skip=true: skip both running and compiling tests.
# -Dmaven.javadoc.skip=true: skip javadoc generation.

12. Patched Source

GitHub: https://github.com/forsre/hive3.1.2

Gitee: https://gitee.com/forsre/hive3.1.2

13. Download and Build

# git clone https://github.com/forsre/hive3.1.2.git
git clone https://gitee.com/forsre/hive3.1.2.git
cd hive3.1.2
mvn clean package -DskipTests -Pdist
# The packaged jars and the distribution tarball end up in packaging/target