Good looking .hiverc file
The following is the .hiverc from one of the Hadoop environments I work on:
-- Additional .jar includes would go here, e.g.:
-- add jar hdfs://ualprod/tmp/json-serde-1.3.7-jar-with-dependencies.jar;

-- Dynamic partitioning: allow fully dynamic inserts and raise the partition limits.
set hive.exec.dynamic.partition.mode=nonstrict;
set hive.exec.max.dynamic.partitions=100000;
set hive.exec.max.dynamic.partitions.pernode=10000;
set hive.optimize.sort.dynamic.partition=true;

-- Join optimization: merge multiple map-joins into a single task when possible.
set hive.auto.convert.join.noconditionaltask=true;

-- Tez execution engine with large containers (10 GB container; heap sized below it).
set hive.execution.engine=tez;
set hive.tez.container.size=10240;
set hive.tez.java.opts=-Xmx8192m;

-- Allow SQL:2011 reserved words to be used as identifiers.
set hive.support.sql11.reserved.keywords=false;

-- Cost-based optimizer and statistics-driven planning.
set hive.cbo.enable=true;
set hive.compute.query.using.stats=true;
set hive.stats.fetch.column.stats=true;
set hive.stats.fetch.partition.stats=true;

-- Vectorized execution on both map and reduce sides.
set hive.vectorized.execution.enabled=true;
set hive.vectorized.execution.reduce.enabled=true;
set hive.vectorized.execution.reduce.groupby.enabled=true;

-- Run independent query stages in parallel.
set hive.exec.parallel=true;
set hive.exec.parallel.thread.number=16;

-- Let Tez pick reducer parallelism automatically (-1 = auto).
set mapred.reduce.tasks=-1;
set hive.tez.auto.reducer.parallelism=true;
set hive.tez.min.partition.factor=0.25;
set hive.tez.max.partition.factor=2.0;

-- Lazily allocate pipelined-sorter memory instead of reserving it up front.
set tez.runtime.pipelined.sorter.lazy-allocate.memory=true;
Some more settings I have used, with a smaller Tez container:
-- Smaller Tez container variant (9 GB container; heap sized below it).
set hive.tez.container.size=9216;
set hive.tez.java.opts=-Xmx7372m;
-- Disable fetch-task conversion so every query runs through the execution engine.
set hive.fetch.task.conversion=none;
-- Cap input split grouping at 128 MB (134217728 bytes).
-- NOTE: the original had a doubled keyword ("set set ..."), which fails to parse.
set tez.grouping.max-size=134217728;
Hope this helps.