Qihoo360 / Quicksql

A Flexible, Fast, Federated(3F) SQL Analysis Middleware for Multiple Data Sources

Home Page: https://quicksql.readthedocs.io

Geek Repo:Geek Repo

Github PK Tool:Github PK Tool

求大神帮忙看个报错ERROR|compiler.err.prob.found.req

QstDz opened this issue · comments

您好,

我这边报错如下:
ERROR|compiler.err.prob.found.req
LINE:COLUMN 25:48
incompatible types: org.apache.spark.sql.DataFrame cannot be converted to org.apache.spark.sql.Dataset<org.apache.spark.sql.Row>

import com.qihoo.qsql.exec.Requirement;
import org.apache.spark.sql.catalyst.expressions.Attribute;
import org.apache.spark.sql.Row;
import java.util.stream.Collectors;
import org.apache.spark.sql.SparkSession;
import java.util.Collections;
import java.util.Map;
import com.qihoo.qsql.exec.spark.SparkRequirement;
import scala.collection.JavaConversions;
import java.util.AbstractMap.SimpleEntry;
import com.qihoo.qsql.codegen.spark.SparkJdbcGenerator;
import org.apache.spark.sql.Dataset;
import java.util.Arrays;
import java.util.Properties;
import java.util.List;

public class Requirement23448 extends SparkRequirement {
/**
 * Builds this generated requirement around an existing {@link SparkSession}.
 *
 * @param spark the session used to read the JDBC sources and register temp views
 */
public Requirement23448(SparkSession spark) {
    super(spark);
}

            public Object execute() throws Exception {
                    Dataset<Row> tmp;
                    {
                    tmp = spark.read().jdbc("jdbc:hive2://****:10000/hdw_opn", "(SELECT data_dt, device_num, user_num, rela_grp_user_id, mac_id, machine_seq_num, dev_big_type_cd, dev_big_type_desc, bind_unbind_ind, update_tm, family_id, family_name, pdate FROM hdw_opn.psi_user_dev_bind) hdw_opn_psi_user_dev_bind_0", SparkJdbcGenerator.config("***", "***", "org.apache.hive.jdbc.HiveDriver"));
                    tmp.createOrReplaceTempView("hdw_opn_psi_user_dev_bind_0");
                    }
                    {
                    tmp = spark.read().jdbc("jdbc:hive2://***:30010/dh_zz", "(SELECT mac, barcode, workuser_barcode, workuser_randombarcode, order_code, factory_code, site_code, factory_name, line_code, line_name, product_code, product_name, binding_time, unbinding_time, scantime, bd_flag, dh_etl_date, pt FROM dh_zz.dwd_factory_mac_wifi_code_barcode_detail_info) dh_zz_dwd_factory_mac_wifi_code_barcode_detail_info_1", SparkJdbcGenerator.config("****", "***", "org.apache.hive.jdbc.HiveDriver"));
                    tmp.createOrReplaceTempView("dh_zz_dwd_factory_mac_wifi_code_barcode_detail_info_1");
                    }

自己顶一下,有木有大神在线解答一下