I have a few tables in Hive that I need to access from my SAS platform.
I wrote the explicit pass-through query below (some values are masked with *), but it throws an error.
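/* Connect to HiveServer2 over JDBC; values masked with * and macro variables resolve at run time */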
PROC SQL;
   CONNECT TO HADOOP (
      host         = "bigdataplatform-gcg-&amb..lac.nsroot.net"
      schema       = &ESQUEMA.
      HDFS_TEMPDIR = "/data/&HDFS./work/hive/&HDFS._ofc/"
      DBMAX_TEXT   = *****
      uri          = "jdbc:hive2://bigdataplatform-gcg-&amb..lac.nsroot.net:10000/&ESQUEMA.;principal=hive/*****T;ssl=true"
      scratch_db   = "&HDFS._ofc"
      subchar      = questionmark
   );
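   /* The HiveQL inside CONNECTION TO HADOOP ( ) runs in Hive: it pulls fields out of the JSON in
      xml_data, splits the products string and explodes it into one row per product, and the rows
      returned are written to the SAS table Applicant */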
   CREATE TABLE Applicant AS
   SELECT *
   FROM CONNECTION TO HADOOP (
      WITH json_extract AS (
         SELECT
            get_json_object(xml_data, '$.app.Id')               AS ID,
            get_json_object(xml_data, '$.app.apply[0].flag')    AS Flag,
            get_json_object(xml_data, '$.app.apply[0].Product') AS products
         FROM check
      )
      SELECT
         ID,
         Flag,
         get_json_object(product_data, '$.Code')   AS Code,
         get_json_object(product_data, '$.Line')   AS Line,
         get_json_object(product_data, '$.status') AS Status
      FROM json_extract
      LATERAL VIEW OUTER EXPLODE(SPLIT(products, '},')) p AS product_data;
   );
   DISCONNECT FROM HADOOP;
QUIT;