[host ~]$ beeline
Beeline version 1.2.1000.2.6.1.0-129 by Apache Hive
beeline> !connect jdbc:hive2://datanode1:10500
Connecting to jdbc:hive2://datanode1:10500
Enter username for jdbc:hive2://datanode1:10500: hive
Enter password for jdbc:hive2://datanode1:10500: *******
Connected to: Apache Hive (version 2.1.0.2.6.1.0-129)
Driver: Hive JDBC (version 1.2.1000.2.6.1.0-129)
Transaction isolation: TRANSACTION_REPEATABLE_READ
0: jdbc:hive2://datanode1:10500> set druid.metadata.storage.type=postgresql;
No rows affected (0.004 seconds)
0: jdbc:hive2://datanode1:10500> set druid.extensions.loadList=["postgresql-metadata-storage"];
No rows affected (0.003 seconds)
0: jdbc:hive2://datanode1:10500> set DRUID_USERNAME=druid;
No rows affected (0.003 seconds)
0: jdbc:hive2://datanode1:10500> set DRUID_PASSWORD=druid;
No rows affected (0.003 seconds)
0: jdbc:hive2://datanode1:10500> set METADATA_DRUID_HOST=postgres_host_ip;
No rows affected (0.003 seconds)
0: jdbc:hive2://datanode1:10500> set BROKER_DRUID_HOST=datanode1;
No rows affected (0.002 seconds)
0: jdbc:hive2://datanode1:10500> set COORDINATOR_DRUID_HOST=coordinator_host_ip;
No rows affected (0.003 seconds)
0: jdbc:hive2://datanode1:10500> set hive.druid.metadata.username=${DRUID_USERNAME};
No rows affected (0.003 seconds)
0: jdbc:hive2://datanode1:10500> set hive.druid.metadata.password=${DRUID_PASSWORD};
No rows affected (0.003 seconds)
0: jdbc:hive2://datanode1:10500> set druid.metadata.storage.connector.user=${DRUID_USERNAME};
No rows affected (0.002 seconds)
0: jdbc:hive2://datanode1:10500> set druid.metadata.storage.connector.password=${DRUID_PASSWORD};
No rows affected (0.003 seconds)
0: jdbc:hive2://datanode1:10500> set hive.druid.metadata.uri=jdbc:postgresql://${METADATA_DRUID_HOST}:5432/druid;
No rows affected (0.002 seconds)
0: jdbc:hive2://datanode1:10500> set druid.metadata.storage.connector.connectURI=jdbc:postgresql://${METADATA_DRUID_HOST}:5432/druid;
No rows affected (0.002 seconds)
0: jdbc:hive2://datanode1:10500> set hive.druid.indexer.partition.size.max=1000000;
No rows affected (0.002 seconds)
0: jdbc:hive2://datanode1:10500> set hive.druid.indexer.memory.rownum.max=100000;
No rows affected (0.002 seconds)
0: jdbc:hive2://datanode1:10500> set hive.druid.broker.address.default=${BROKER_DRUID_HOST}:8082;
No rows affected (0.002 seconds)
0: jdbc:hive2://datanode1:10500> set hive.druid.coordinator.address.default=${COORDINATOR_DRUID_HOST}:8081;
No rows affected (0.002 seconds)
0: jdbc:hive2://datanode1:10500> set hive.druid.storage.storageDirectory=/apps/hive/warehouse;
No rows affected (0.003 seconds)
0: jdbc:hive2://datanode1:10500> set hive.tez.container.size=1024;
No rows affected (0.002 seconds)
0: jdbc:hive2://datanode1:10500> set hive.druid.passiveWaitTimeMs=180000;
No rows affected (0.003 seconds)
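Before running the CTAS it can be worth echoing a few of these properties back: issuing set with a property name and no value prints the current setting instead of changing it, which confirms that the ${...} variable substitution resolved as intended. A minimal check, using the properties set above:

-- `set <name>;` with no assignment echoes the current value of <name>.
set hive.druid.metadata.uri;
set hive.druid.broker.address.default;
set hive.druid.coordinator.address.default;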
0: jdbc:hive2://datanode1:10500> CREATE TABLE test_druid.test_raw_druid STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
0: jdbc:hive2://datanode1:10500> TBLPROPERTIES ( "druid.datasource" = "test_raw_druid", "druid.segment.granularity" = "MONTH", "druid.query.granularity" = "DAY")
0: jdbc:hive2://datanode1:10500> AS select cast(col1 as timestamp) as `__time`, cast(col2 as string) col2 from test.sample_complete limit 10;
INFO : Compiling command(queryId=hive_20180427171132_6328a57a-0a77-4fe5-8e22-7c2bf671604c): CREATE TABLE test_druid.test_raw_druid STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.datasource" = "test_raw_druid", "druid.segment.granularity" = "MONTH", "druid.query.granularity" = "DAY") AS select cast(col1 as timestamp) as `__time`, cast(col2 as string) col2 from test.sample_complete limit 10
INFO : We are setting the hadoop caller context from HIVE_SSN_ID:94ddd2ce-ddf7-464d-8005-e41964995186 to hive_20180427171132_6328a57a-0a77-4fe5-8e22-7c2bf671604c
INFO : Semantic Analysis Completed
INFO : Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:__time, type:timestamp, comment:null), FieldSchema(name:col2, type:string, comment:null)], properties:null)
INFO : Completed compiling command(queryId=hive_20180427171132_6328a57a-0a77-4fe5-8e22-7c2bf671604c); Time taken: 0.16 seconds
INFO : We are resetting the hadoop caller context to HIVE_SSN_ID:94ddd2ce-ddf7-464d-8005-e41964995186
INFO : Setting caller context to query id hive_20180427171132_6328a57a-0a77-4fe5-8e22-7c2bf671604c
INFO : Executing command(queryId=hive_20180427171132_6328a57a-0a77-4fe5-8e22-7c2bf671604c): CREATE TABLE test_druid.test_raw_druid STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.datasource" = "test_raw_druid", "druid.segment.granularity" = "MONTH", "druid.query.granularity" = "DAY") AS select cast(col1 as timestamp) as `__time`, cast(col2 as string) col2 from test.sample_complete limit 10
INFO : Query ID = hive_20180427171132_6328a57a-0a77-4fe5-8e22-7c2bf671604c
INFO : Total jobs = 1
INFO : Launching Job 1 out of 1
INFO : Starting task [Stage-1:MAPRED] in parallel
INFO : Session is already open
INFO : Tez session missing resources, adding additional necessary resources
INFO : Dag name: CREATE TABLE test_druid.test_ra...10(Stage-1)
INFO : Status: Running (Executing on YARN cluster with App id application_1524629970018_0003)

--------------------------------------------------------------------------------
        VERTICES      MODE        STATUS  TOTAL  COMPLETED  RUNNING  PENDING  FAILED
--------------------------------------------------------------------------------
Map 1 ..........      llap     SUCCEEDED      1          1        0        0       0
Reducer 2 ......      llap     SUCCEEDED      1          1        0        0       0
Reducer 3 ......      llap     SUCCEEDED     49         49        0        0       0
--------------------------------------------------------------------------------
VERTICES: 03/03  [==========================>>] 100%  ELAPSED TIME: 194.31 s
--------------------------------------------------------------------------------
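For reference, the CTAS above follows the shape the Druid storage handler expects: the first projected column must be a timestamp named __time, string columns become Druid dimensions, and numeric columns become Druid metrics. A hedged sketch of the same pattern with an added metric column (the table name test_metrics_druid and the numeric column col3 are hypothetical, for illustration only):

CREATE TABLE test_druid.test_metrics_druid
STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
TBLPROPERTIES (
  "druid.datasource" = "test_metrics_druid",
  "druid.segment.granularity" = "MONTH",  -- one Druid segment per month
  "druid.query.granularity" = "DAY")      -- rows rolled up to day precision
AS SELECT
  cast(col1 as timestamp) AS `__time`,    -- required time column
  cast(col2 as string)    AS col2,        -- string: becomes a dimension
  cast(col3 as double)    AS col3         -- numeric: becomes a metric
FROM test.sample_complete;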
INFO : Status: DAG finished successfully in 2.20 seconds
INFO :
INFO : Query Execution Summary
INFO : ----------------------------------------------------------------------------------------------
INFO : OPERATION                            DURATION
INFO : ----------------------------------------------------------------------------------------------
INFO : Compile Query                           0.00s
INFO : Prepare Plan                            0.00s
INFO : Submit Plan                    1524829293.92s
INFO : Start DAG                               0.21s
INFO : Run DAG                                 2.20s
INFO : ----------------------------------------------------------------------------------------------
INFO :
INFO : Task Execution Summary
INFO : ----------------------------------------------------------------------------------------------
INFO :   VERTICES  DURATION(ms)  CPU_TIME(ms)  GC_TIME(ms)  INPUT_RECORDS  OUTPUT_RECORDS
INFO : ----------------------------------------------------------------------------------------------
INFO :      Map 1          0.00             0            0             15              10
INFO :  Reducer 2          0.00             0            0             10              10
INFO :  Reducer 3       1197.00             0            0             10               0
INFO : ----------------------------------------------------------------------------------------------
INFO :
INFO : LLAP IO Summary
INFO : ----------------------------------------------------------------------------------------------
INFO :   VERTICES  ROWGROUPS  META_HIT  META_MISS  DATA_HIT  DATA_MISS  ALLOCATION  USED  TOTAL_IO
INFO : ----------------------------------------------------------------------------------------------
INFO :      Map 1          0         0          0        0B         0B          0B    0B     0.00s
INFO : ----------------------------------------------------------------------------------------------
INFO :
INFO : FileSystem Counters Summary
INFO :
INFO : Scheme: FILE
INFO : ----------------------------------------------------------------------------------------------
INFO :   VERTICES  BYTES_READ  READ_OPS  LARGE_READ_OPS  BYTES_WRITTEN  WRITE_OPS
INFO : ----------------------------------------------------------------------------------------------
INFO :      Map 1          0B         0               0            98B          0
INFO :  Reducer 2          0B         0               0         2.22KB          0
INFO :  Reducer 3          0B         0               0             0B          0
INFO : ----------------------------------------------------------------------------------------------
INFO :
INFO : Scheme: HDFS
INFO : ----------------------------------------------------------------------------------------------
INFO :   VERTICES  BYTES_READ  READ_OPS  LARGE_READ_OPS  BYTES_WRITTEN  WRITE_OPS
INFO : ----------------------------------------------------------------------------------------------
INFO :      Map 1     49.93KB         1               0             0B          0
INFO :  Reducer 2          0B         0               0             0B          0
INFO :  Reducer 3          0B        50               0         3.46KB         50
INFO : ----------------------------------------------------------------------------------------------
INFO :
INFO : org.apache.tez.common.counters.DAGCounter:
INFO :    NUM_SUCCEEDED_TASKS: 51
INFO :    TOTAL_LAUNCHED_TASKS: 51
INFO :    DATA_LOCAL_TASKS: 1
INFO :    AM_CPU_MILLISECONDS: 1630
INFO :    AM_GC_TIME_MILLIS: 0
INFO : File System Counters:
INFO :    FILE_BYTES_READ: 0
INFO :    FILE_BYTES_WRITTEN: 2319
INFO :    FILE_READ_OPS: 0
INFO :    FILE_LARGE_READ_OPS: 0
INFO :    FILE_WRITE_OPS: 0
INFO :    HDFS_BYTES_READ: 49930
INFO :    HDFS_BYTES_WRITTEN: 3458
INFO :    HDFS_READ_OPS: 51
INFO :    HDFS_LARGE_READ_OPS: 0
INFO :    HDFS_WRITE_OPS: 50
INFO : org.apache.tez.common.counters.TaskCounter:
INFO :    REDUCE_INPUT_GROUPS: 2
INFO :    REDUCE_INPUT_RECORDS: 20
INFO :    COMBINE_INPUT_RECORDS: 0
INFO :    SPILLED_RECORDS: 40
INFO :    NUM_SHUFFLED_INPUTS: 2
INFO :    NUM_SKIPPED_INPUTS: 48
INFO :    NUM_FAILED_SHUFFLE_INPUTS: 0
INFO :    MERGED_MAP_OUTPUTS: 2
INFO :    INPUT_RECORDS_PROCESSED: 16
INFO :    INPUT_SPLIT_LENGTH_BYTES: 49930
INFO :    OUTPUT_RECORDS: 20
INFO :    OUTPUT_LARGE_RECORDS: 0
INFO :    OUTPUT_BYTES: 300
INFO :    OUTPUT_BYTES_WITH_OVERHEAD: 500
INFO :    OUTPUT_BYTES_PHYSICAL: 1103
INFO :    ADDITIONAL_SPILLS_BYTES_WRITTEN: 77
INFO :    ADDITIONAL_SPILLS_BYTES_READ: 143
INFO :    ADDITIONAL_SPILL_COUNT: 0
INFO :    SHUFFLE_CHUNK_COUNT: 2
INFO :    SHUFFLE_BYTES: 143
INFO :    SHUFFLE_BYTES_DECOMPRESSED: 212
INFO :    SHUFFLE_BYTES_TO_MEM: 77
INFO :    SHUFFLE_BYTES_TO_DISK: 0
INFO :    SHUFFLE_BYTES_DISK_DIRECT: 66
INFO :    NUM_MEM_TO_DISK_MERGES: 0
INFO :    NUM_DISK_TO_DISK_MERGES: 0
INFO :    SHUFFLE_PHASE_TIME: 2032
INFO :    MERGE_PHASE_TIME: 2179
INFO :    FIRST_EVENT_RECEIVED: 1365
INFO :    LAST_EVENT_RECEIVED: 1365
INFO : HIVE:
INFO :    CREATED_FILES: 1
INFO :    DESERIALIZE_ERRORS: 0
INFO :    RECORDS_IN_Map_1: 15
INFO :    RECORDS_OUT_1_test_druid.test_raw_druid: 10
INFO :    RECORDS_OUT_INTERMEDIATE_Map_1: 10
INFO :    RECORDS_OUT_INTERMEDIATE_Reducer_2: 10
INFO : Shuffle Errors:
INFO :    BAD_ID: 0
INFO :    CONNECTION: 0
INFO :    IO_ERROR: 0
INFO :    WRONG_LENGTH: 0
INFO :    WRONG_MAP: 0
INFO :    WRONG_REDUCE: 0
INFO : Shuffle Errors_Reducer_2_INPUT_Map_1:
INFO :    BAD_ID: 0
INFO :    CONNECTION: 0
INFO :    IO_ERROR: 0
INFO :    WRONG_LENGTH: 0
INFO :    WRONG_MAP: 0
INFO :    WRONG_REDUCE: 0
INFO : Shuffle Errors_Reducer_3_INPUT_Reducer_2:
INFO :    BAD_ID: 0
INFO :    CONNECTION: 0
INFO :    IO_ERROR: 0
INFO :    WRONG_LENGTH: 0
INFO :    WRONG_MAP: 0
INFO :    WRONG_REDUCE: 0
INFO : TaskCounter_Map_1_INPUT_sample_complete:
INFO :    INPUT_RECORDS_PROCESSED: 16
INFO :    INPUT_SPLIT_LENGTH_BYTES: 49930
INFO : TaskCounter_Map_1_OUTPUT_Reducer_2:
INFO :    ADDITIONAL_SPILLS_BYTES_READ: 0
INFO :    ADDITIONAL_SPILLS_BYTES_WRITTEN: 0
INFO :    ADDITIONAL_SPILL_COUNT: 0
INFO :    OUTPUT_BYTES: 90
INFO :    OUTPUT_BYTES_PHYSICAL: 66
INFO :    OUTPUT_BYTES_WITH_OVERHEAD: 100
INFO :    OUTPUT_LARGE_RECORDS: 0
INFO :    OUTPUT_RECORDS: 10
INFO :    SHUFFLE_CHUNK_COUNT: 1
INFO :    SPILLED_RECORDS: 10
INFO : TaskCounter_Reducer_2_INPUT_Map_1:
INFO :    ADDITIONAL_SPILLS_BYTES_READ: 66
INFO :    ADDITIONAL_SPILLS_BYTES_WRITTEN: 0
INFO :    COMBINE_INPUT_RECORDS: 0
INFO :    FIRST_EVENT_RECEIVED: 3
INFO :    LAST_EVENT_RECEIVED: 3
INFO :    MERGED_MAP_OUTPUTS: 1
INFO :    MERGE_PHASE_TIME: 7
INFO :    NUM_DISK_TO_DISK_MERGES: 0
INFO :    NUM_FAILED_SHUFFLE_INPUTS: 0
INFO :    NUM_MEM_TO_DISK_MERGES: 0
INFO :    NUM_SHUFFLED_INPUTS: 1
INFO :    NUM_SKIPPED_INPUTS: 0
INFO :    REDUCE_INPUT_GROUPS: 1
INFO :    REDUCE_INPUT_RECORDS: 10
INFO :    SHUFFLE_BYTES: 66
INFO :    SHUFFLE_BYTES_DECOMPRESSED: 100
INFO :    SHUFFLE_BYTES_DISK_DIRECT: 66
INFO :    SHUFFLE_BYTES_TO_DISK: 0
INFO :    SHUFFLE_BYTES_TO_MEM: 0
INFO :    SHUFFLE_PHASE_TIME: 6
INFO :    SPILLED_RECORDS: 10
INFO : TaskCounter_Reducer_2_OUTPUT_Reducer_3:
INFO :    ADDITIONAL_SPILLS_BYTES_READ: 0
INFO :    ADDITIONAL_SPILLS_BYTES_WRITTEN: 0
INFO :    ADDITIONAL_SPILL_COUNT: 0
INFO :    OUTPUT_BYTES: 210
INFO :    OUTPUT_BYTES_PHYSICAL: 1037
INFO :    OUTPUT_BYTES_WITH_OVERHEAD: 400
INFO :    OUTPUT_LARGE_RECORDS: 0
INFO :    OUTPUT_RECORDS: 10
INFO :    SHUFFLE_CHUNK_COUNT: 1
INFO :    SPILLED_RECORDS: 10
INFO : TaskCounter_Reducer_3_INPUT_Reducer_2:
INFO :    ADDITIONAL_SPILLS_BYTES_READ: 77
INFO :    ADDITIONAL_SPILLS_BYTES_WRITTEN: 77
INFO :    COMBINE_INPUT_RECORDS: 0
INFO :    FIRST_EVENT_RECEIVED: 1362
INFO :    LAST_EVENT_RECEIVED: 1362
INFO :    MERGED_MAP_OUTPUTS: 1
INFO :    MERGE_PHASE_TIME: 2172
INFO :    NUM_DISK_TO_DISK_MERGES: 0
INFO :    NUM_FAILED_SHUFFLE_INPUTS: 0
INFO :    NUM_MEM_TO_DISK_MERGES: 0
INFO :    NUM_SHUFFLED_INPUTS: 1
INFO :    NUM_SKIPPED_INPUTS: 48
INFO :    REDUCE_INPUT_GROUPS: 1
INFO :    REDUCE_INPUT_RECORDS: 10
INFO :    SHUFFLE_BYTES: 77
INFO :    SHUFFLE_BYTES_DECOMPRESSED: 112
INFO :    SHUFFLE_BYTES_DISK_DIRECT: 0
INFO :    SHUFFLE_BYTES_TO_DISK: 0
INFO :    SHUFFLE_BYTES_TO_MEM: 77
INFO :    SHUFFLE_PHASE_TIME: 2026
INFO :    SPILLED_RECORDS: 10
INFO : TaskCounter_Reducer_3_OUTPUT_out_Reducer_3:
INFO :    OUTPUT_RECORDS: 0
INFO : Starting task [Stage-2:DEPENDENCY_COLLECTION] in serial mode
INFO : Starting task [Stage-0:MOVE] in serial mode
INFO : Moving data to directory hdfs://hdp26/apps/hive/warehouse/test_druid.db/test_raw_druid from hdfs://hdp26/apps/hive/warehouse/test_druid.db/.hive-staging_hive_2018-04-27_17-11-32_773_3028691308897185810-1/-ext-10002
INFO : Starting task [Stage-4:DDL] in serial mode
ERROR : FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot load JDBC driver class 'com.mysql.jdbc.Driver'
INFO : Resetting the caller context to HIVE_SSN_ID:94ddd2ce-ddf7-464d-8005-e41964995186
INFO : Completed executing command(queryId=hive_20180427171132_6328a57a-0a77-4fe5-8e22-7c2bf671604c); Time taken: 195.474 seconds
Error: Error while processing statement: FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot load JDBC driver class 'com.mysql.jdbc.Driver' (state=08S01,code=1)
0: jdbc:hive2://datanode1:10500>
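The Stage-4 DDL task is where the storage handler registers the new segments in Druid's metadata store, and the "Cannot load JDBC driver class 'com.mysql.jdbc.Driver'" error shows it still tried its default MySQL connector. The druid.metadata.storage.type and druid.extensions.loadList properties set at the top are Druid service settings, which the Hive-side handler does not read; Hive has its own property for this. A hedged sketch of what to try before re-running the CTAS, assuming this Hive build honors hive.druid.metadata.db.type and that the PostgreSQL JDBC jar is on the HiveServer2 classpath:

-- Assumption: hive.druid.metadata.db.type (default "mysql") is the property
-- this build's DruidStorageHandler consults for the metadata store type.
set hive.druid.metadata.db.type=postgresql;
set hive.druid.metadata.username=${DRUID_USERNAME};
set hive.druid.metadata.password=${DRUID_PASSWORD};
set hive.druid.metadata.uri=jdbc:postgresql://${METADATA_DRUID_HOST}:5432/druid;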