If you want to learn from example source code that uses com.facebook.presto.sql.tree.Expression, this article is for you. We also look at example source code for com.facebook.presto.Session, com.facebook.presto.spi.PrestoException, com.facebook.presto.sql.tree.AddColumn, and com.facebook.presto.sql.tree.AliasedRelation, with practical snippets throughout. We hope it helps!
Contents:
- Example source code for com.facebook.presto.sql.tree.Expression
- Example source code for com.facebook.presto.Session
- Example source code for com.facebook.presto.spi.PrestoException
- Example source code for com.facebook.presto.sql.tree.AddColumn
- Example source code for com.facebook.presto.sql.tree.AliasedRelation
Example source code for com.facebook.presto.sql.tree.Expression
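Before diving into the snippets, here is a minimal sketch (not taken from the snippets below) of how an Expression AST node is typically obtained, assuming the presto-parser artifact is on the classpath; the class name ParseExpressionExample and the sample SQL text are illustrative only.

import com.facebook.presto.sql.parser.SqlParser;
import com.facebook.presto.sql.tree.Expression;

public class ParseExpressionExample
{
    public static void main(String[] args)
    {
        // SqlParser turns SQL text into AST nodes; createExpression parses a scalar expression
        SqlParser parser = new SqlParser();
        Expression expression = parser.createExpression("orderkey + 1 > 100");
        // The returned node is a ComparisonExpression whose children can be visited or rewritten,
        // which is what the formatter and planner snippets below do
        System.out.println(expression.getClass().getSimpleName() + ": " + expression);
    }
}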
protected static String processFuncSet(Formatter formatter,FunctionCall node) { StringBuilder builder = new StringBuilder(); String functionName = getFunctionName(node); int numArguments = node.getArguments().size(); builder.append(functionName).append('(').append(formatter.process(node.getArguments().get(0),null)).append( ')'); if (numArguments > 1) { builder.append(" ON "); } for (int i = 1; i < numArguments; i++) { Expression item = node.getArguments().get(i); if (i == 1) { builder.append(formatter.process(item,null)); } else { builder.append(",").append(formatter.process(item,null)); } } return builder.toString(); }
@Override protected String visitArithmeticExpression(ArithmeticExpression node,Void context) { if (node.getType().equals(ArithmeticExpression.Type.DIVIDE)) { if (_outputDivideByZeroGuard == true) { if (node.getRight() instanceof FunctionCall) { if (getFunctionName((FunctionCall) node.getRight()).equals("nullifzero")) { // bypass appending nullifzero return formatBinaryExpression(node.getType().getValue(),node.getLeft(),node.getRight()); } } else if (node.getRight() instanceof Literal) { // purely literal return formatBinaryExpression(node.getType().getValue(),node.getLeft(),node.getRight()); } List<Expression> arguments = new ArrayList<Expression>(); arguments.add(node.getRight()); FunctionCall nullifzeroFunc = new FunctionCall(new QualifiedName("nullifzero"),arguments); return formatBinaryExpression(node.getType().getValue(),node.getLeft(),nullifzeroFunc); } else { return formatBinaryExpression(node.getType().getValue(),node.getLeft(),node.getRight()); } } else { return formatBinaryExpression(node.getType().getValue(),node.getLeft(),node.getRight()); } }
protected String joinPassExpressions(String on,List<Expression> expressions) { return Joiner.on(on).join(transform(expressions,new Function<Expression,Object>() { @Override public Object apply(Expression input) { if (input instanceof QualifiedNameReference) { // 20150709: enclose vero ident in () in case association matters return '(' + process(input,null) + ')'; } else { return process(input,null); } } })); }
public static void printExpression(String sql) { println(sql.trim()); println(""); System.out.println("EXP Printing CommonTree toString..."); CommonTree tree = VeroSqlParser.parseExpression(sql); println(TreePrinter.treeToString(tree)); println(""); System.out.println("EXP Printing AST toString..."); Expression expression = VeroSqlParser.createExpression(tree); println(expression.toString()); println(""); System.out.println("EXP Printing Format sql toString..."); // TODO: support formatting all statement types println(FormatterFactory.getSqlFormatter().formatSql(expression)); println(""); println(repeat("=",60)); println(""); }
@Override protected Void visitValues(Values node,Integer indent) { builder.append(" VALUES "); boolean first = true; for (Expression row : node.getRows()) { builder.append("\n") .append(indentString(indent)) .append(first ? " " : ","); builder.append(formatExpression(row,parameters,indent)); first = false; } builder.append('\n'); return null; }
/** * Parses the list with values to insert and returns them as Objects */ @Override public List<Object> visitValues(Values values,QueryState state){ List<Object> result = new ArrayList<Object>(); for(Expression rowExpression : values.getRows()){ if(rowExpression instanceof Row) { Row row = (Row)rowExpression; for(Expression rowValue : row.getItems()){ if(!(rowValue instanceof Literal)) { state.addException("Unable to parse non-literal value : "+rowValue); return result; } result.add(getObject((Literal)rowValue)); } }else if (rowExpression instanceof Literal){ result.add(getObject((Literal)rowExpression)); }else { state.addException("Unknown VALUES type "+rowExpression.getClass()+" encountered"); return null; } } return result; }
@Override public PlanNode visitSample(SampleNode node,RewriteContext<Void> context) { if (node.getSampleType() == SampleNode.Type.BERNOULLI) { PlanNode rewrittenSource = context.rewrite(node.getSource()); ComparisonExpression expression = new ComparisonExpression( ComparisonExpression.Type.LESS_THAN,new FunctionCall(QualifiedName.of("rand"),ImmutableList.<Expression>of()),new DoubleLiteral(Double.toString(node.getSampleRatio()))); return new FilterNode(node.getId(),rewrittenSource,expression); } else if (node.getSampleType() == SampleNode.Type.POISSONIZED || node.getSampleType() == SampleNode.Type.SYSTEM) { return context.defaultRewrite(node); } throw new UnsupportedOperationException("not yet implemented"); }
@Test public void testFromIsNullPredicate() throws Exception { Expression originalExpression = isNull(A); ExtractionResult result = fromPredicate(originalExpression); assertEquals(result.getRemainingExpression(),TRUE_LITERAL); assertEquals(result.getTupleDomain(),withColumnDomains(ImmutableMap.of(A,Domain.onlyNull(BIGINT)))); originalExpression = isNull(K); result = fromPredicate(originalExpression); assertEquals(result.getRemainingExpression(),TRUE_LITERAL); assertEquals(result.getTupleDomain(),withColumnDomains(ImmutableMap.of(K,Domain.onlyNull(HYPER_LOG_LOG)))); originalExpression = not(isNull(A)); result = fromPredicate(originalExpression); assertEquals(result.getRemainingExpression(),TRUE_LITERAL); assertEquals(result.getTupleDomain(),withColumnDomains(ImmutableMap.of(A,Domain.notNull(BIGINT)))); originalExpression = not(isNull(K)); result = fromPredicate(originalExpression); assertEquals(result.getRemainingExpression(),TRUE_LITERAL); assertEquals(result.getTupleDomain(),withColumnDomains(ImmutableMap.of(K,Domain.notNull(HYPER_LOG_LOG)))); }
private OperatorFactory compileFilterWithNoInputColumns(Expression filter,ExpressionCompiler compiler) { filter = ExpressionTreeRewriter.rewriteWith(new SymbolToInputRewriter(ImmutableMap.<Symbol,Integer>of()),filter); IdentityHashMap<Expression,Type> expressionTypes = getExpressionTypesFromInput(TEST_SESSION,Metadata,sql_PARSER,INPUT_TYPES,ImmutableList.of(filter)); try { PageProcessor processor = compiler.compilePageProcessor(toRowExpression(filter,expressionTypes),ImmutableList.of()); return new FilterandProjectOperator.FilterandProjectOperatorFactory(0,new PlanNodeId("test"),processor,ImmutableList.<Type>of()); } catch (Throwable e) { if (e instanceof UncheckedExecutionException) { e = e.getCause(); } throw new RuntimeException("Error compiling " + filter + ": " + e.getMessage(),e); } }
public InterpretedFilterFunction( Expression predicate,Map<Symbol,Type> symbolTypes,Integer> symbolToInputMappings,Metadata Metadata,sqlParser sqlParser,Session session) { // pre-compute symbol -> input mappings and replace the corresponding nodes in the tree Expression rewritten = ExpressionTreeRewriter.rewriteWith(new SymbolToInputRewriter(symbolToInputMappings),predicate); // analyze expression so we can kNow the type of every expression in the tree ImmutableMap.Builder<Integer,Type> inputTypes = ImmutableMap.builder(); for (Map.Entry<Symbol,Integer> entry : symbolToInputMappings.entrySet()) { inputTypes.put(entry.getValue(),symbolTypes.get(entry.getKey())); } IdentityHashMap<Expression,Type> expressionTypes = getExpressionTypesFromInput(session,sqlParser,inputTypes.build(),rewritten); evaluator = ExpressionInterpreter.expressionInterpreter(rewritten,session,expressionTypes); }
@Override public PlanNode visitTableScan(TableScanNode node,RewriteContext<Void> context) { Expression originalConstraint = null; if (node.getOriginalConstraint() != null) { originalConstraint = canonicalizeExpression(node.getOriginalConstraint()); } return new TableScanNode( node.getId(),node.getTable(),node.getOutputSymbols(),node.getAssignments(),node.getLayout(),node.getCurrentConstraint(),originalConstraint); }
@Override public Void visitProject(ProjectNode node,Void context) { StringBuilder builder = new StringBuilder(); for (Map.Entry<Symbol,Expression> entry : node.getAssignments().entrySet()) { if ((entry.getValue() instanceof QualifiedNameReference) && ((QualifiedNameReference) entry.getValue()).getName().equals(entry.getKey().toQualifiedName())) { // skip identity assignments continue; } builder.append(format("%s := %s\\n",entry.getKey(),entry.getValue())); } printNode(node,"Project",builder.toString(),NODE_COLORS.get(NodeType.PROJECT)); return node.getSource().accept(this,context); }
private Expression rewriteExpression(Expression expression,Predicate<Symbol> symbolScope,boolean allowFullReplacement) { Iterable<Expression> subExpressions = SubExpressionExtractor.extract(expression); if (!allowFullReplacement) { subExpressions = filter(subExpressions,not(equalTo(expression))); } ImmutableMap.Builder<Expression,Expression> expressionRemap = ImmutableMap.builder(); for (Expression subExpression : subExpressions) { Expression canonical = getScopedCanonical(subExpression,symbolScope); if (canonical != null) { expressionRemap.put(subExpression,canonical); } } // Perform a naive single-pass traversal to try to rewrite non-compliant portions of the tree. Prefers to replace // larger subtrees over smaller subtrees // Todo: this rewrite can probably be made more sophisticated Expression rewritten = ExpressionTreeRewriter.rewriteWith(new ExpressionNodeInliner(expressionRemap.build()),expression); if (!symbolToExpressionPredicate(symbolScope).apply(rewritten)) { // If the rewritten is still not compliant with the symbol scope,just give up return null; } return rewritten; }
@Override protected Type visitInPredicate(InPredicate node,StackableAstVisitorContext<AnalysisContext> context) { Expression value = node.getValue(); process(value,context); Expression valueList = node.getValueList(); process(valueList,context); if (valueList instanceof InListExpression) { InListExpression inListExpression = (InListExpression) valueList; coerceToSingleType(context,"IN value and list items must be the same type: %s",ImmutableList.<Expression>builder().add(value).addAll(inListExpression.getValues()).build()); } else if (valueList instanceof SubqueryExpression) { coerceToSingleType(context,node,"value and result of subquery must be of the same type for IN expression: %s vs %s",value,valueList); } expressionTypes.put(node,BOOLEAN); return BOOLEAN; }
@Override public Boolean visitWindowFrame(WindowFrame node,Void context) { Optional<Expression> start = node.getStart().getValue(); if (start.isPresent()) { if (!process(start.get(),context)) { throw new SemanticException(MUST_BE_AGGREGATE_OR_GROUP_BY,start.get(),"Window frame start must be an aggregate expression or appear in GROUP BY clause"); } } if (node.getEnd().isPresent() && node.getEnd().get().getValue().isPresent()) { Expression endValue = node.getEnd().get().getValue().get(); if (!process(endValue,context)) { throw new SemanticException(MUST_BE_AGGREGATE_OR_GROUP_BY,endValue,"Window frame end must be an aggregate expression or appear in GROUP BY clause"); } } return true; }
@Test public void testFromIsNotNullPredicate() throws Exception { Expression originalExpression = isNotNull(A); ExtractionResult result = fromPredicate(originalExpression); assertEquals(result.getRemainingExpression(),TRUE_LITERAL); assertEquals(result.getTupleDomain(),withColumnDomains(ImmutableMap.of(A,Domain.notNull(BIGINT)))); originalExpression = isNotNull(K); result = fromPredicate(originalExpression); assertEquals(result.getRemainingExpression(),TRUE_LITERAL); assertEquals(result.getTupleDomain(),withColumnDomains(ImmutableMap.of(K,Domain.notNull(HYPER_LOG_LOG)))); originalExpression = not(isNotNull(A)); result = fromPredicate(originalExpression); assertEquals(result.getRemainingExpression(),TRUE_LITERAL); assertEquals(result.getTupleDomain(),withColumnDomains(ImmutableMap.of(A,Domain.onlyNull(BIGINT)))); originalExpression = not(isNotNull(K)); result = fromPredicate(originalExpression); assertEquals(result.getRemainingExpression(),TRUE_LITERAL); assertEquals(result.getTupleDomain(),withColumnDomains(ImmutableMap.of(K,Domain.onlyNull(HYPER_LOG_LOG)))); }
@Override public Void visitProject(ProjectNode node,Void context) { PlanNode source = node.getSource(); source.accept(this,context); // visit child verifyUniqueId(node); Set<Symbol> inputs = ImmutableSet.copyOf(source.getOutputSymbols()); for (Expression expression : node.getAssignments().values()) { Set<Symbol> dependencies = DependencyExtractor.extractUnique(expression); checkDependencies(inputs,dependencies,"Invalid node. Expression dependencies (%s) not in source plan output (%s)",dependencies,inputs); } return null; }
protected static String processFuncAtan2(Formatter formatter,FunctionCall node) { Expression x = node.getArguments().get(0); Expression y = node.getArguments().get(1); FunctionCall xx = new FunctionCall(new QualifiedName("power"),Arrays.asList(x,new LongLiteral("2"))); FunctionCall yy = new FunctionCall(new QualifiedName("power"),Arrays.asList(y,new LongLiteral("2"))); ArithmeticExpression xxAddyy = new ArithmeticExpression(ArithmeticExpression.Type.ADD,xx,yy); FunctionCall sqrt_xxAddyy = new FunctionCall(new QualifiedName("sqrt"),Arrays.asList(xxAddyy)); ArithmeticExpression substract = new ArithmeticExpression(ArithmeticExpression.Type.SUBTRACT,sqrt_xxAddyy,x); ArithmeticExpression divide = new ArithmeticExpression(ArithmeticExpression.Type.DIVIDE,substract,y); FunctionCall arctan = new FunctionCall(new QualifiedName("atan"),Arrays.asList(divide)); ArithmeticExpression multiply = new ArithmeticExpression(ArithmeticExpression.Type.MULTIPLY,new DoubleLiteral("2"),arctan); return formatter.process(multiply,null); }
protected static String processFuncNullifzero(Formatter formatter,FunctionCall node) { Expression x = node.getArguments().get(0); List<WhenClause> listWhen = new ArrayList<WhenClause>(); ComparisonExpression ce = new ComparisonExpression(ComparisonExpression.Type.EQUAL,x,new LongLiteral("0")); WhenClause wc = new WhenClause(ce,new NullLiteral()); listWhen.add(wc); SearchedCaseExpression sce = new SearchedCaseExpression(listWhen,x); return formatter.process(sce,null); }
protected static Expression processFuncLast(ComparisonExpression node) { System.out.println("Processing last()"); Expression rightNode = node.getRight(); Expression leftNode = node.getLeft(); FunctionCall last = (FunctionCall) rightNode; // # of arguments are already checked outside 1 or 2 String number = last.getArguments().get(0).toString(); String format = "DAY"; // default if (last.getArguments().size() == 2) { format = last.getArguments().get(1).toString().replaceAll("\"",""); } IntervalLiteral.Sign sign; if (number.startsWith("-")) { sign = IntervalLiteral.Sign.NEGATIVE; number = number.substring(1); } else { sign = IntervalLiteral.Sign.POSITIVE; } CurrentTime cTime = new CurrentTime(CurrentTime.Type.DATE); IntervalLiteral interval = new IntervalLiteral(number,sign,format); ArithmeticExpression arithmOp = new ArithmeticExpression(ArithmeticExpression.Type.SUBTRACT,cTime,interval); BetweenPredicate bPredicate = new BetweenPredicate(leftNode,arithmOp,cTime); return bPredicate; }
public Function<Expression,String> expressionFormatterFunction() { return new Function<Expression,String>() { @Override public String apply(Expression input) { return formatExpression(input); } }; }
@Override protected String visitArrayConstructor(ArrayConstructor node,Void context) { ImmutableList.Builder<String> valueStrings = ImmutableList.builder(); for (Expression value : node.getValues()) { valueStrings.add(SqlFormatter.formatSql(value)); } return "ARRAY[" + Joiner.on(",").join(valueStrings.build()) + "]"; }
protected String formatBinaryExpression(String operator,Expression left,Expression right) { if (operator.equals("")) { // 20150709: +VPC+ return process(left,null) + process(right,null); } else { return '(' + process(left,null) + ' ' + operator + ' ' + process(right,null) + ')'; } }
protected String joinExpressions(List<Expression> expressions) { return Joiner.on(",").join(transform(expressions,new Function<Expression,Object>() { @Override public Object apply(Expression input) { return process(input,null); } })); }
protected String joinExpressions(String on,List<Expression> expressions) { return Joiner.on(on).join(transform(expressions,new Function<Expression,Object>() { @Override public Object apply(Expression input) { return process(input,null); } })); }
@Override protected String visitArrayConstructor(ArrayConstructor node,StackableAstVisitorContext<Integer> indent) { ImmutableList.Builder<String> valueStrings = ImmutableList.builder(); for (Expression value : node.getValues()) { valueStrings.add(formatExpression(value,indent.getContext() + 1)); } return "ARRAY[" + Joiner.on(",").join(valueStrings.build()) + "]"; }
Example source code for com.facebook.presto.Session
public static QueryRunner createLocalQueryRunner() throws Exception { Session defaultSession = testSessionBuilder() .setCatalog("slack") .setSchema("default") .build(); QueryRunner queryRunner = new distributedQueryRunner(defaultSession,1); queryRunner.installPlugin(new SlackPlugin()); queryRunner.createCatalog( "slack","slack",ImmutableMap.of("token",System.getenv("SLACK_TOKEN"))); return queryRunner; }
public static QueryRunner createLocalQueryRunner() throws Exception { Session defaultSession = testSessionBuilder() .setCatalog("github") .setSchema("default") .build(); QueryRunner queryRunner = new distributedQueryRunner(defaultSession,1); queryRunner.installPlugin(new GithubPlugin()); queryRunner.createCatalog( "github","github",System.getenv("GITHUB_TOKEN"))); return queryRunner; }
private static QueryRunner createLocalQueryRunner() throws Exception { Session defaultSession = TestingSession.testSessionBuilder() .setCatalog("twitter") .setSchema("default") .build(); QueryRunner queryRunner = new DistributedQueryRunner(defaultSession,1); queryRunner.installPlugin(new TwitterPlugin()); queryRunner.createCatalog( "twitter","twitter",ImmutableMap.of( "customer_key",System.getenv("TWITTER_CUSTOMER_KEY"),"customer_secret",System.getenv("TWITTER_CUSTOMER_SECRET"),"token",System.getenv("TWITTER_TOKEN"),"secret",System.getenv("TWITTER_SECRET"))); return queryRunner; }
private static LocalQueryRunner createLocalQueryRunner() { Session defaultSession = testSessionBuilder() .setCatalog("tpch") .setSchema(TINY_SCHEMA_NAME) .build(); LocalQueryRunner localQueryRunner = new LocalQueryRunner(defaultSession); InMemoryNodeManager nodeManager = localQueryRunner.getNodeManager(); localQueryRunner.createCatalog("tpch",new TpchConnectorFactory(nodeManager,1),ImmutableMap.<String,String>of()); HyperLogLogPlugin plugin = new HyperLogLogPlugin(); for (Type type : plugin.getTypes()) { localQueryRunner.getTypeManager().addType(type); } for (ParametricType parametricType : plugin.getParametricTypes()) { localQueryRunner.getTypeManager().addParametricType(parametricType); } localQueryRunner.getMetadata().addFunctions(extractFunctions(plugin.getFunctions())); return localQueryRunner; }
@Override public List<QualifiedObjectName> listTables(Session session,QualifiedTablePrefix prefix) { requireNonNull(prefix,"prefix is null"); String schemaNameOrNull = prefix.getSchemaName().orElse(null); Set<QualifiedObjectName> tables = new LinkedHashSet<>(); for (ConnectorEntry entry : allConnectorsFor(prefix.getCatalogName())) { ConnectorMetadata metadata = entry.getMetadata(session); ConnectorSession connectorSession = session.toConnectorSession(entry.getCatalog()); for (QualifiedObjectName tableName : transform(metadata.listTables(connectorSession,schemaNameOrNull),convertFromSchemaTableName(prefix.getCatalogName()))) { tables.add(tableName); } } return ImmutableList.copyOf(tables); }
public StatementAnalyzer( Analysis analysis,Metadata metadata,SqlParser sqlParser,AccessControl accessControl,Session session,boolean experimentalSyntaxEnabled,Optional<QueryExplainer> queryExplainer) { this.analysis = requireNonNull(analysis,"analysis is null"); this.metadata = requireNonNull(metadata,"metadata is null"); this.sqlParser = requireNonNull(sqlParser,"sqlParser is null"); this.accessControl = requireNonNull(accessControl,"accessControl is null"); this.session = requireNonNull(session,"session is null"); this.experimentalSyntaxEnabled = experimentalSyntaxEnabled; this.queryExplainer = requireNonNull(queryExplainer,"queryExplainer is null"); }
public static LocalQueryRunner createLocalQueryRunner() { Session session = testSessionBuilder() .setCatalog("raptor") .setSchema("benchmark") .build(); LocalQueryRunner localQueryRunner = new LocalQueryRunner(session); // add tpch InMemoryNodeManager nodeManager = localQueryRunner.getNodeManager(); localQueryRunner.createCatalog("tpch",new TpchConnectorFactory(nodeManager,1),ImmutableMap.<String,String>of()); // add raptor ConnectorFactory raptorConnectorFactory = createRaptorConnectorFactory(TPCH_CACHE_DIR,nodeManager); localQueryRunner.createCatalog("raptor",raptorConnectorFactory,ImmutableMap.of()); if (!localQueryRunner.tableExists(session,"orders")) { localQueryRunner.execute("CREATE TABLE orders AS SELECT * FROM tpch.sf1.orders"); } if (!localQueryRunner.tableExists(session,"lineitem")) { localQueryRunner.execute("CREATE TABLE lineitem AS SELECT * FROM tpch.sf1.lineitem"); } return localQueryRunner; }
public String getGraphvizPlan(Session session,Statement statement,Type planType) { DataDefinitionTask<?> task = dataDefinitionTask.get(statement.getClass()); if (task != null) { // todo format as graphviz return explainTask(statement,task); } switch (planType) { case LOGICAL: Plan plan = getLogicalPlan(session,statement); return PlanPrinter.graphvizLogicalPlan(plan.getRoot(),plan.getTypes()); case DISTRIBUTED: SubPlan subPlan = getDistributedPlan(session,statement); return PlanPrinter.graphvizDistributedPlan(subPlan); } throw new IllegalArgumentException("Unhandled plan type: " + planType); }
@Override public List<QualifiedObjectName> listViews(Session session,QualifiedTablePrefix prefix) { requireNonNull(prefix,"prefix is null"); String schemaNameOrNull = prefix.getSchemaName().orElse(null); Set<QualifiedObjectName> views = new LinkedHashSet<>(); for (ConnectorEntry entry : allConnectorsFor(prefix.getCatalogName())) { ConnectorMetadata metadata = entry.getMetadata(session); ConnectorSession connectorSession = session.toConnectorSession(entry.getCatalog()); for (QualifiedObjectName tableName : transform(metadata.listViews(connectorSession,schemaNameOrNull),convertFromSchemaTableName(prefix.getCatalogName()))) { views.add(tableName); } } return ImmutableList.copyOf(views); }
public static ExpressionAnalysis analyzeExpressionsWithSymbols( Session session,Map<Symbol,Type> types,Iterable<? extends Expression> expressions) { List<Field> fields = DependencyExtractor.extractUnique(expressions).stream() .map(symbol -> { Type type = types.get(symbol); checkArgument(type != null,"No type for symbol %s",symbol); return Field.newUnqualified(symbol.getName(),type); }) .collect(toImmutableList()); return analyzeExpressions(session,Metadata,sqlParser,new RelationType(fields),expressions); }
public static RowExpression translate( Expression expression,FunctionKind functionKind,IdentityHashMap<Expression,FunctionRegistry functionRegistry,TypeManager typeManager,boolean optimize) { RowExpression result = new Visitor(functionKind,types,typeManager,session.getTimeZoneKey()).process(expression,null); requireNonNull(result,"translated expression is null"); if (optimize) { Expressionoptimizer optimizer = new Expressionoptimizer(functionRegistry,session); return optimizer.optimize(result); } return result; }
public Query(Session session,String query,QueryManager queryManager,ExchangeClient exchangeClient) { requireNonNull(session,"session is null"); requireNonNull(query,"query is null"); requireNonNull(queryManager,"queryManager is null"); requireNonNull(exchangeClient,"exchangeClient is null"); this.session = session; this.queryManager = queryManager; QueryInfo queryInfo = queryManager.createQuery(session,query); queryId = queryInfo.getQueryId(); this.exchangeClient = exchangeClient; }
@Override public CompletableFuture<?> execute(RenameTable statement,TransactionManager transactionManager,Metadata metadata,AccessControl accessControl,QueryStateMachine stateMachine) { Session session = stateMachine.getSession(); QualifiedObjectName tableName = createQualifiedObjectName(session,statement,statement.getSource()); Optional<TableHandle> tableHandle = metadata.getTableHandle(session,tableName); if (!tableHandle.isPresent()) { throw new SemanticException(MISSING_TABLE,statement,"Table '%s' does not exist",tableName); } QualifiedObjectName target = createQualifiedObjectName(session,statement,statement.getTarget()); if (!metadata.getCatalogNames().containsKey(target.getCatalogName())) { throw new SemanticException(MISSING_CATALOG,statement,"Target catalog '%s' does not exist",target.getCatalogName()); } if (metadata.getTableHandle(session,target).isPresent()) { throw new SemanticException(TABLE_ALREADY_EXISTS,statement,"Target table '%s' already exists",target); } if (!tableName.getCatalogName().equals(target.getCatalogName())) { throw new SemanticException(NOT_SUPPORTED,statement,"Table rename across catalogs is not supported"); } accessControl.checkCanRenameTable(session.getRequiredTransactionId(),session.getIdentity(),tableName,target); metadata.renameTable(session,tableHandle.get(),target); return completedFuture(null); }
@POST @Produces(MediaType.APPLICATION_JSON) public Response createQuery(String query,@Context HttpServletRequest servletRequest) { assertRequest(!isNullOrEmpty(query),"sql query is empty"); Session session = createSessionForRequest(servletRequest,accessControl,sessionPropertyManager,queryIdGenerator.createNextQueryId()); ClientSession clientSession = session.toClientSession(serverUri(),false,new Duration(2,MINUTES)); StatementClient client = new StatementClient(httpClient,queryResultsCodec,clientSession,query); List<Column> columns = getColumns(client); Iterator<List<Object>> iterator = flatten(new ResultsPageIterator(client)); SimpleQueryResults results = new SimpleQueryResults(columns,iterator); return Response.ok(results,MediaType.APPLICATION_JSON_TYPE).build(); }
private Operator interpretedFilterProject(Expression filter,Expression projection,Session session) { FilterFunction filterFunction = new InterpretedFilterFunction( filter,SYMBOL_TYPES,INPUT_MAPPING,sql_PARSER,session ); ProjectionFunction projectionFunction = new InterpretedProjectionFunction( projection,session ); OperatorFactory operatorFactory = new FilterandProjectOperator.FilterandProjectOperatorFactory(0,new PlanNodeId("test"),new GenericPageProcessor(filterFunction,ImmutableList.of(projectionFunction)),toTypes( ImmutableList.of(projectionFunction))); return operatorFactory.createOperator(createDriverContext(session)); }
public String getPlan(Session session,Statement statement,Type planType) { DataDefinitionTask<?> task = dataDefinitionTask.get(statement.getClass()); if (task != null) { return explainTask(statement,task); } switch (planType) { case LOGICAL: Plan plan = getLogicalPlan(session,statement); return PlanPrinter.textLogicalPlan(plan.getRoot(),plan.getTypes(),metadata,session); case DISTRIBUTED: SubPlan subPlan = getDistributedPlan(session,statement); return PlanPrinter.textDistributedPlan(subPlan,metadata,session); } throw new IllegalArgumentException("Unhandled plan type: " + planType); }
public StandaloneQueryRunner(Session defaultSession) throws Exception { requireNonNull(defaultSession,"defaultSession is null"); try { server = createTestingPrestoServer(); } catch (Exception e) { close(); throw e; } this.prestoClient = new TestingPrestoClient(server,defaultSession); refreshNodes(); server.getMetadata().addFunctions(AbstractTestQueries.CUSTOM_FUNCTIONS); SessionPropertyManager sessionPropertyManager = server.getMetadata().getSessionPropertyManager(); sessionPropertyManager.addSystemSessionProperties(AbstractTestQueries.TEST_SYSTEM_PROPERTIES); sessionPropertyManager.addConnectorSessionProperties("catalog",AbstractTestQueries.TEST_CATALOG_PROPERTIES); }
public SqlTaskExecution create(Session session,QueryContext queryContext,TaskStateMachine taskStateMachine,SharedBuffer sharedBuffer,PlanFragment fragment,List<TaskSource> sources) { boolean verboseStats = getVerboseStats(session); TaskContext taskContext = queryContext.addTaskContext( taskStateMachine,session,requireNonNull(operatorPreAllocatedMemory,"operatorPreAllocatedMemory is null"),verboseStats,cpuTimerEnabled); return createSqlTaskExecution( taskStateMachine,taskContext,sharedBuffer,fragment,sources,planner,taskExecutor,taskNotificationExecutor,queryMonitor); }
private static DistributedQueryRunner createQueryRunner() throws Exception { Session session = testSessionBuilder() .setSource("test") .setCatalog("tpch") .setSchema("tiny") .build(); DistributedQueryRunner queryRunner = new DistributedQueryRunner(session,4,ImmutableMap.of("optimizer.optimize-hash-generation","false")); try { queryRunner.installPlugin(new TpchPlugin()); queryRunner.createCatalog("tpch","tpch"); queryRunner.installPlugin(new SampledTpchPlugin()); queryRunner.createCatalog("tpch_sampled","tpch_sampled"); return queryRunner; } catch (Exception e) { queryRunner.close(); throw e; } }
/** * Returns list of queues to enter, or null if query does not match rule */ public List<QueryQueueDefinition> match(Session session) { if (userRegex != null && !userRegex.matcher(session.getUser()).matches()) { return null; } if (sourceRegex != null) { String source = session.getSource().orElse(""); if (!sourceRegex.matcher(source).matches()) { return null; } } for (Map.Entry<String,Pattern> entry : sessionPropertyRegexes.entrySet()) { String value = session.getSystemProperties().getOrDefault(entry.getKey(),""); if (!entry.getValue().matcher(value).matches()) { return null; } } return queues; }
@Override public DataDeFinitionExecution<?> createqueryExecution( QueryId queryId,Statement statement) { URI self = locationFactory.createqueryLocation(queryId); DataDeFinitionTask<Statement> task = getTask(statement); checkArgument(task != null,"no task for statement: %s",statement.getClass().getSimpleName()); QueryStateMachine stateMachine = QueryStateMachine.begin(queryId,query,self,task.isTransactionControl(),transactionManager,executor); stateMachine.setUpdateType(task.getName()); return new DataDeFinitionExecution<>(task,stateMachine); }
@Test public void testNameExpansion() { Session session = TestingSession.testSessionBuilder() .setIdentity(new Identity("bob",Optional.empty())) .setSource("the-internet") .build(); QueryQueueDefinition definition = new QueryQueueDefinition("user.${USER}",1,1); assertEquals(definition.getExpandedTemplate(session),"user.bob"); definition = new QueryQueueDefinition("source.${SOURCE}",1,1); assertEquals(definition.getExpandedTemplate(session),"source.the-internet"); definition = new QueryQueueDefinition("${USER}.${SOURCE}",1,1); assertEquals(definition.getExpandedTemplate(session),"bob.the-internet"); definition = new QueryQueueDefinition("global",1,1); assertEquals(definition.getExpandedTemplate(session),"global"); }
private InternalTable buildTables(Session session,String catalogName,Map<String,NullableValue> filters) { Set<QualifiedObjectName> tables = ImmutableSet.copyOf(getTablesList(session,catalogName,filters)); Set<QualifiedObjectName> views = ImmutableSet.copyOf(getViewsList(session,catalogName,filters)); InternalTable.Builder table = InternalTable.builder(informationSchemaTableColumns(TABLE_TABLES)); for (QualifiedObjectName name : union(tables,views)) { // if table and view names overlap, the view wins String type = views.contains(name) ? "VIEW" : "BASE TABLE"; table.add( name.getCatalogName(),name.getSchemaName(),name.getObjectName(),type); } return table.build(); }
@Test public void testStartTransactionExplicitModes() throws Exception { Session session = sessionBuilder() .setClientTransactionSupport() .build(); TransactionManager transactionManager = createTestTransactionManager(); QueryStateMachine stateMachine = QueryStateMachine.begin(new QueryId("query"),"START TRANSACTION",URI.create("fake://uri"),true,executor); Assert.assertFalse(stateMachine.getSession().getTransactionId().isPresent()); new StartTransactionTask().execute(new StartTransaction(ImmutableList.of(new Isolation(Isolation.Level.SERIALIZABLE),new TransactionAccessMode(true))),new AllowAllAccessControl(),stateMachine).join(); Assert.assertFalse(stateMachine.getQueryInfoWithoutDetails().isClearTransactionId()); Assert.assertTrue(stateMachine.getQueryInfoWithoutDetails().getStartedTransactionId().isPresent()); Assert.assertEquals(transactionManager.getAllTransactionInfos().size(),1); TransactionInfo transactionInfo = transactionManager.getTransactionInfo(stateMachine.getQueryInfoWithoutDetails().getStartedTransactionId().get()); Assert.assertEquals(transactionInfo.getIsolationLevel(),IsolationLevel.SERIALIZABLE); Assert.assertTrue(transactionInfo.isReadOnly()); Assert.assertFalse(transactionInfo.isAutoCommitContext()); }
@Test public void testCreateSession() throws Exception { HttpServletRequest request = new MockHttpServletRequest( ImmutableListMultimap.<String,String>builder() .put(PRESTO_USER,"testUser") .put(PRESTO_SOURCE,"testSource") .put(PRESTO_CATALOG,"testCatalog") .put(PRESTO_SCHEMA,"testSchema") .put(PRESTO_LANGUAGE,"zh-TW") .put(PRESTO_TIME_ZONE,"Asia/Taipei") .put(PRESTO_SESSION,QUERY_MAX_MEMORY + "=1GB") .put(PRESTO_SESSION,distributeD_JOIN + "=true," + HASH_PARTITION_COUNT + " = 43") .build(),"testRemote"); Session session = createSessionForRequest(request,new SessionPropertyManager(),new QueryId("test_query_id")); assertEquals(session.getQueryId(),new QueryId("test_query_id")); assertEquals(session.getUser(),"testUser"); assertEquals(session.getSource().get(),"testSource"); assertEquals(session.getCatalog().get(),"testCatalog"); assertEquals(session.getSchema().get(),"testSchema"); assertEquals(session.getLocale(),Locale.TAIWAN); assertEquals(session.getTimeZoneKey(),getTimeZoneKey("Asia/Taipei")); assertEquals(session.getRemoteUserAddress().get(),"testRemote"); assertEquals(session.getSystemProperties(),String>builder() .put(QUERY_MAX_MEMORY,"1GB") .put(distributeD_JOIN,"true") .put(HASH_PARTITION_COUNT,"43") .build()); }
public KafkaLoader(Producer<Long,Object> producer,String topicName,TestingPrestoServer prestoServer,Session defaultSession) { super(prestoServer,defaultSession); this.topicName = topicName; this.producer = producer; }
public Visitor(Metadata metadata,Session session,Map<Symbol,Type> types,SqlParser parser) { this.metadata = metadata; this.session = session; this.types = types; this.parser = parser; }
public static TaskContext createTaskContext(Executor executor,Session session) { return createTaskContext( checkNotSameThreadExecutor(executor,"executor is null"),new DataSize(256,MEGABYTE)); }
@Override public Optional<ColumnHandle> getSampleWeightColumnHandle(Session session,TableHandle tableHandle) { requireNonNull(tableHandle,"tableHandle is null"); ConnectorEntry entry = lookupConnectorFor(tableHandle); ConnectorMetadata metadata = entry.getMetadata(session); ColumnHandle handle = metadata.getSampleWeightColumnHandle(session.toConnectorSession(entry.getCatalog()),tableHandle.getConnectorHandle()); return Optional.ofNullable(handle); }
private static Session createSession(String schema) { return testSessionBuilder() .setCatalog("raptor") .setSchema(schema) .setSystemProperties(ImmutableMap.of("columnar_processing_dictionary","true","dictionary_aggregation","true")) .build(); }
private boolean getVerboseStats(Session session) { String verboseStats = session.getSystemProperties().get(VERBOSE_STATS_PROPERTY); if (verboseStats == null) { return this.verboseStats; } try { return Boolean.valueOf(verboseStats.toUpperCase()); } catch (IllegalArgumentException e) { throw new PrestoException(NOT_SUPPORTED,"Invalid property '" + VERBOSE_STATS_PROPERTY + "=" + verboseStats + "'"); } }
@Override public Optional<ResolvedIndex> resolveIndex(Session session,TableHandle tableHandle,Set<ColumnHandle> indexableColumns,Set<ColumnHandle> outputColumns,TupleDomain<ColumnHandle> tupleDomain) { ConnectorEntry entry = lookupConnectorFor(tableHandle); ConnectorMetadata metadata = entry.getMetadata(session); ConnectorTransactionHandle transaction = entry.getTransactionHandle(session); ConnectorSession connectorSession = session.toConnectorSession(entry.getCatalog()); Optional<ConnectorResolvedIndex> resolvedIndex = metadata.resolveIndex(connectorSession,tableHandle.getConnectorHandle(),indexableColumns,outputColumns,tupleDomain); return resolvedIndex.map(resolved -> new ResolvedIndex(tableHandle.getConnectorId(),transaction,resolved)); }
private Rewriter(SymbolAllocator symbolAllocator,PlanNodeIdAllocator idAllocator,Metadata metadata,Session session) { this.symbolAllocator = requireNonNull(symbolAllocator,"symbolAllocator is null"); this.idAllocator = requireNonNull(idAllocator,"idAllocator is null"); this.metadata = requireNonNull(metadata,"metadata is null"); this.session = requireNonNull(session,"session is null"); }
@Test public void fieldLength() { Session session = testSessionBuilder() .setCatalog("blackhole") .setSchema("default") .build(); assertthatQueryReturnsValue( format("CREATE TABLE nation WITH ( %s = 8,%s = 1,%s = 1 ) as SELECT * FROM tpch.tiny.nation",FIELD_LENGTH_PROPERTY,ROWS_PER_PAGE_PROPERTY,PAGES_PER_SPLIT_PROPERTY,SPLIT_COUNT_PROPERTY),25L,session); MaterializedResult rows = queryRunner.execute(session,"SELECT * FROM nation"); assertEquals(rows.getRowCount(),1); MaterializedRow row = Iterables.getonlyElement(rows); assertEquals(row.getFieldCount(),4); assertEquals(row.getField(0),0L); assertEquals(row.getField(1),"********"); assertEquals(row.getField(2),0L); assertEquals(row.getField(3),"********"); assertthatQueryReturnsValue("DROP TABLE nation",true); }
public static void copyTpchTables( QueryRunner queryRunner,String sourceCatalog,String sourceSchema,Iterable<TpchTable<?>> tables) throws Exception { log.info("Loading data from %s.%s...",sourceCatalog,sourceSchema); long startTime = System.nanoTime(); for (TpchTable<?> table : tables) { copyTable(queryRunner,sourceSchema,table.getTableName().toLowerCase(ENGLISH),session); } log.info("Loading from %s.%s complete in %s",nanosSince(startTime).toString(SECONDS)); }
public Analyzer(Session session,Metadata metadata,SqlParser sqlParser,AccessControl accessControl,Optional<QueryExplainer> queryExplainer,boolean experimentalSyntaxEnabled) { this.session = requireNonNull(session,"session is null"); this.metadata = requireNonNull(metadata,"metadata is null"); this.sqlParser = requireNonNull(sqlParser,"sqlParser is null"); this.accessControl = requireNonNull(accessControl,"accessControl is null"); this.queryExplainer = requireNonNull(queryExplainer,"query explainer is null"); this.experimentalSyntaxEnabled = experimentalSyntaxEnabled; }
public Rewriter(SymbolAllocator allocator,SymbolAllocator symbolAllocator,boolean distributedindexJoins,boolean distributedJoins,boolean preferStreamingOperators,boolean redistributeWrites) { this.allocator = allocator; this.idAllocator = idAllocator; this.symbolAllocator = symbolAllocator; this.session = session; this.distributedindexJoins = distributedindexJoins; this.distributedJoins = distributedJoins; this.preferStreamingOperators = preferStreamingOperators; this.redistributeWrites = redistributeWrites; }
@Override public ConnectorPageSink createPageSink(Session session,InsertTableHandle tableHandle) { // assumes connectorId and catalog are the same ConnectorSession connectorSession = session.toConnectorSession(tableHandle.getConnectorId()); return providerFor(tableHandle.getConnectorId()).createPageSink(tableHandle.getTransactionHandle(),connectorSession,tableHandle.getConnectorHandle()); }
private QueryStateMachine(QueryId queryId,String query,Session session,URI self,boolean autoCommit,TransactionManager transactionManager,Executor executor) { this.queryId = requireNonNull(queryId,"queryId is null"); this.query = requireNonNull(query,"query is null"); this.session = requireNonNull(session,"session is null"); this.self = requireNonNull(self,"self is null"); this.autoCommit = autoCommit; this.transactionManager = requireNonNull(transactionManager,"transactionManager is null"); this.queryState = new StateMachine<>("query " + query,executor,QUEUED,TERMINAL_QUERY_STATES); }
public static Session createCassandraSession(String schema) { return testSessionBuilder() .setCatalog("cassandra") .setSchema(schema) .build(); }
Example source code for com.facebook.presto.spi.PrestoException
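The error codes referenced in these snippets (HIVE_CURSOR_ERROR, RAPTOR_ERROR, and so on) are connector-defined constants. As a rough, hypothetical illustration of where such constants come from, a connector can expose its own codes by implementing com.facebook.presto.spi.ErrorCodeSupplier; newer Presto SPI versions also take an ErrorType argument on ErrorCode, so treat the enum name and numeric code below as assumptions, not the definitions used by the connectors above.

import com.facebook.presto.spi.ErrorCode;
import com.facebook.presto.spi.ErrorCodeSupplier;

public enum ExampleErrorCode implements ErrorCodeSupplier
{
    EXAMPLE_BACKEND_ERROR(0x0100_0000);

    private final ErrorCode errorCode;

    ExampleErrorCode(int code)
    {
        // Older SPI versions take (code, name); newer ones also require an ErrorType
        this.errorCode = new ErrorCode(code, name());
    }

    @Override
    public ErrorCode toErrorCode()
    {
        return errorCode;
    }
}

// usage: throw new PrestoException(ExampleErrorCode.EXAMPLE_BACKEND_ERROR, "backend unavailable", cause);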
public T processjsonArray(JsonParser jsonParser) throws IOException { int currentIndex = 0; while (true) { JsonToken token = jsonParser.nextToken(); if (token == null) { throw new JsonParseException("Unexpected end of array",jsonParser.getCurrentLocation()); } if (token == END_ARRAY) { // Index out of bounds if (exceptionOnOutOfBounds) { throw new PrestoException(INVALID_FUNCTION_ARGUMENT,"Index out of bounds"); } return null; } if (currentIndex == index) { break; } currentIndex++; jsonParser.skipChildren(); // Skip nested structure if currently at the start of one } return delegate.extract(jsonParser); }
@Override public boolean advanceNextPosition() { try { if (closed || !recordReader.hasNext()) { close(); return false; } row = (OrcStruct) recordReader.next(row); // reset loaded flags // partition keys are already loaded, but everything else is not System.arraycopy(isPartitionColumn,0,loaded,0,isPartitionColumn.length); return true; } catch (IOException | RuntimeException e) { closeWithSuppression(e); throw new PrestoException(HIVE_CURSOR_ERROR,e); } }
public WindowFunctionSupplier getWindowFunctionImplementation(Signature signature) { checkArgument(signature.getKind() == WINDOW || signature.getKind() == AGGREGATE,"%s is not a window function",signature); checkArgument(signature.getTypeParameterRequirements().isEmpty(),"%s has unbound type parameters",signature); Iterable<SqlFunction> candidates = functions.get(QualifiedName.of(signature.getName())); // search for exact match for (SqlFunction operator : candidates) { Type returnType = typeManager.getType(signature.getReturnType()); List<Type> argumentTypes = resolveTypes(signature.getArgumentTypes(),typeManager); Map<String,Type> boundTypeParameters = operator.getSignature().bindTypeParameters(returnType,argumentTypes,false,typeManager); if (boundTypeParameters != null) { try { return specializedWindowCache.getUnchecked(new SpecializedFunctionKey(operator,boundTypeParameters,signature.getArgumentTypes().size())); } catch (UncheckedExecutionException e) { throw Throwables.propagate(e.getCause()); } } } throw new PrestoException(FUNCTION_IMPLEMENTATION_MISSING,format("%s not found",signature)); }
private Set<HivePrivilege> getPrivileges(String user,HiveObjectRef objectReference) { ImmutableSet.Builder<HivePrivilege> privileges = ImmutableSet.builder(); try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) { PrincipalPrivilegeSet privilegeSet = client.getPrivilegeSet(objectReference,user,null); if (privilegeSet != null) { Map<String,List<PrivilegeGrantInfo>> userPrivileges = privilegeSet.getUserPrivileges(); if (userPrivileges != null) { privileges.addAll(toGrants(userPrivileges.get(user))); } for (List<PrivilegeGrantInfo> rolePrivileges : privilegeSet.getRolePrivileges().values()) { privileges.addAll(toGrants(rolePrivileges)); } // We do not add the group permissions as Hive does not seem to process these } } catch (TException e) { throw new PrestoException(HIVE_METASTORE_ERROR,e); } return privileges.build(); }
public static Block toArray(Type arrayType,ConnectorSession connectorSession,Slice json) { try { List<?> array = (List<?>) stackRepresentationToObject(connectorSession,json,arrayType); if (array == null) { return null; } Type elementType = ((ArrayType) arrayType).getElementType(); BlockBuilder blockBuilder = elementType.createBlockBuilder(new BlockBuilderStatus(),array.size()); for (Object element : array) { appendToBlockBuilder(elementType,element,blockBuilder); } return blockBuilder.build(); } catch (RuntimeException e) { throw new PrestoException(INVALID_CAST_ARGUMENT,"Value cannot be cast to " + arrayType,e); } }
private int loadNodeId(String nodeIdentifier) { Integer id = dao.getNodeId(nodeIdentifier); if (id != null) { return id; } // creating a node is idempotent runIgnoringConstraintViolation(() -> dao.insertNode(nodeIdentifier)); id = dao.getNodeId(nodeIdentifier); if (id == null) { throw new PrestoException(INTERNAL_ERROR,"node does not exist after insert"); } return id; }
@Override public final void load(LazyBlock lazyBlock) { if (loaded) { return; } checkState(batchId == expectedBatchId); try { Block block = recordReader.readBlock(type,columnIndex); lazyBlock.setBlock(block); } catch (IOException e) { if (e instanceof OrcCorruptionException) { throw new PrestoException(HIVE_BAD_DATA,e); } throw new PrestoException(HIVE_CURSOR_ERROR,e); } loaded = true; }
@UsedByGeneratedCode public static Object subscript(MethodHandle keyEqualsMethod,Type keyType,Type valueType,Block map,Object key) { for (int position = 0; position < map.getPositionCount(); position += 2) { try { if ((boolean) keyEqualsMethod.invokeExact(keyType.getObject(map,position),key)) { return readNativeValue(valueType,map,position + 1); // position + 1: value position } } catch (Throwable t) { Throwables.propagateIfInstanceOf(t,Error.class); Throwables.propagateIfInstanceOf(t,PrestoException.class); throw new PrestoException(INTERNAL_ERROR,t); } } return null; }
@Override public void dropTable(ConnectorSession session,ConnectorTableHandle tableHandle) { HiveTableHandle handle = checkType(tableHandle,HiveTableHandle.class,"tableHandle"); SchemaTableName tableName = schemaTableName(tableHandle); if (!allowDropTable) { throw new PrestoException(PERMISSION_DENIED,"DROP TABLE is disabled in this Hive catalog"); } Optional<Table> target = metastore.getTable(handle.getSchemaName(),handle.getTableName()); if (!target.isPresent()) { throw new TableNotFoundException(tableName); } Table table = target.get(); if (!session.getUser().equals(table.getOwner())) { throw new PrestoException(PERMISSION_DENIED,format("Unable to drop table '%s': owner of the table is different from session user",table)); } metastore.dropTable(handle.getSchemaName(),handle.getTableName()); }
@PreDestroy public void stop() { boolean queryCancelled = false; for (QueryExecution queryExecution : queries.values()) { QueryInfo queryInfo = queryExecution.getQueryInfo(); if (queryInfo.getState().isDone()) { continue; } log.info("Server shutting down. Query %s has been cancelled",queryExecution.getQueryInfo().getQueryId()); queryExecution.fail(new PrestoException(SERVER_SHUTTING_DOWN,"Server is shutting down. Query " + queryInfo.getQueryId() + " has been cancelled")); queryCancelled = true; } if (queryCancelled) { try { TimeUnit.SECONDS.sleep(5); } catch (InterruptedException e) { Thread.currentThread().interrupt(); } } queryManagementExecutor.shutdownNow(); queryExecutor.shutdownNow(); }
private void writeShard(UUID shardUuid) { if (backupStore.isPresent() && !backupExists(shardUuid)) { throw new PrestoException(RAPTOR_ERROR,"Backup does not exist after write"); } File stagingFile = storageService.getStagingFile(shardUuid); File storageFile = storageService.getStorageFile(shardUuid); storageService.createParents(storageFile); try { Files.move(stagingFile.toPath(),storageFile.toPath(),ATOMIC_MOVE); } catch (IOException e) { throw new PrestoException(RAPTOR_ERROR,"Failed to move shard file",e); } }
@UsedByGeneratedCode public static long arrayPosition(Type type,MethodHandle equalMethodHandle,Block array,Slice element) { int size = array.getPositionCount(); for (int i = 0; i < size; i++) { if (!array.isNull(i)) { Slice arrayValue = type.getSlice(array,i); try { if ((boolean) equalMethodHandle.invokeExact(arrayValue,element)) { return i + 1; // result is 1-based (instead of 0) } } catch (Throwable t) { Throwables.propagateIfInstanceOf(t,Error.class); Throwables.propagateIfInstanceOf(t,PrestoException.class); throw new PrestoException(INTERNAL_ERROR,t); } } } return 0; }
private List<ColumnInfo> getColumnInfo(OrcReader reader) { // Todo: These should be stored as proper Metadata. // XXX: Relying on ORC types will not work when more Presto types are supported. List<String> names = reader.getColumnNames(); Type rowType = getType(reader.getFooter().getTypes(),0); if (names.size() != rowType.getTypeParameters().size()) { throw new PrestoException(RAPTOR_ERROR,"Column names and types do not match"); } ImmutableList.Builder<ColumnInfo> list = ImmutableList.builder(); for (int i = 0; i < names.size(); i++) { list.add(new ColumnInfo(Long.parseLong(names.get(i)),rowType.getTypeParameters().get(i))); } return list.build(); }
public void appendRow(Row row) { List<Object> columns = row.getColumns(); checkArgument(columns.size() == columnTypes.size()); for (int channel = 0; channel < columns.size(); channel++) { tableInspector.setStructFieldData(orcRow,structFields.get(channel),columns.get(channel)); } try { recordWriter.write(serializer.serialize(orcRow,tableInspector)); } catch (IOException e) { throw new PrestoException(RAPTOR_ERROR,"Failed to write record",e); } rowCount++; uncompressedSize += row.getSizeInBytes(); }
private RelationPlan createTableCreationPlan(Analysis analysis) { QualifiedObjectName destination = analysis.getCreateTableDestination().get(); RelationPlan plan = createRelationPlan(analysis); TableMetadata tableMetadata = createTableMetadata(destination,getOutputTableColumns(plan),analysis.getCreateTableProperties(),plan.getSampleWeight().isPresent()); if (plan.getSampleWeight().isPresent() && !metadata.canCreateSampledTables(session,destination.getCatalogName())) { throw new PrestoException(NOT_SUPPORTED,"Cannot write sampled data to a store that doesn't support sampling"); } return createTableWriterPlan( analysis,plan,new CreateName(destination.getCatalogName(),tableMetadata),tableMetadata.getVisibleColumnNames()); }
@Override public final void load(LazyBlock lazyBlock) { if (loaded) { return; } checkState(batchId == expectedBatchId); try { Block block = recordReader.readBlock(type,columnIndex); lazyBlock.setBlock(block); } catch (IOException e) { throw new PrestoException(RAPTOR_ERROR,e); } loaded = true; }
@Test public void testAssignRandomNodeWhenBackupAvailable() throws InterruptedException,URISyntaxException { InMemoryNodeManager nodeManager = new InMemoryNodeManager(); RaptorConnectorId connectorId = new RaptorConnectorId("raptor"); NodeSupplier nodeSupplier = new RaptorNodeSupplier(nodeManager,connectorId); PrestoNode node = new PrestoNode(UUID.randomUUID().toString(),new URI("http://127.0.0.1/"),NodeVersion.UNKNOWN); nodeManager.addNode(connectorId.toString(),node); RaptorSplitManager raptorSplitManagerWithBackup = new RaptorSplitManager(connectorId,nodeSupplier,shardManager,true); deleteShardNodes(); ConnectorTableLayoutResult layout = getOnlyElement(metadata.getTableLayouts(SESSION,tableHandle,Constraint.alwaysTrue(),Optional.empty())); ConnectorSplitSource partitionSplit = getSplits(raptorSplitManagerWithBackup,layout); List<ConnectorSplit> batch = getFutureValue(partitionSplit.getNextBatch(1),PrestoException.class); assertEquals(getOnlyElement(getOnlyElement(batch).getAddresses()),node.getHostAndPort()); }
private String getFormattedSql(CreateView statement) { Query query = statement.getQuery(); String sql = formatSql(query); // verify round-trip Statement parsed; try { parsed = sqlParser.createStatement(sql); } catch (ParsingException e) { throw new PrestoException(INTERNAL_ERROR,"Formatted query does not parse: " + query); } if (!query.equals(parsed)) { throw new PrestoException(INTERNAL_ERROR,"Query does not round-trip: " + query); } return sql; }
@Test public void testRollback() { long tableId = createTable("test"); List<ColumnInfo> columns = ImmutableList.of(new ColumnInfo(1,BIGINT)); List<ShardInfo> shards = ImmutableList.of(shardInfo(UUID.randomUUID(),"node1")); shardManager.createTable(tableId,columns); long transactionId = shardManager.beginTransaction(); shardManager.rollbackTransaction(transactionId); try { shardManager.commitShards(transactionId,tableId,columns,shards,Optional.empty()); fail("expected exception"); } catch (PrestoException e) { assertEquals(e.getErrorCode(),TRANSACTION_CONFLICT.toErrorCode()); } }
@Test public void testTransactionAbort() throws Exception { // start table creation long transactionId = 1; ConnectorOutputTableHandle outputHandle = metadata.beginCreateTable(SESSION,getOrdersTable()); // transaction is in progress assertTrue(transactionExists(transactionId)); assertNull(transactionSuccessful(transactionId)); // force transaction to abort shardManager.rollbackTransaction(transactionId); assertTrue(transactionExists(transactionId)); assertFalse(transactionSuccessful(transactionId)); // commit table creation try { metadata.finishCreateTable(SESSION,outputHandle,ImmutableList.of()); fail("expected exception"); } catch (PrestoException e) { assertEquals(e.getErrorCode(),TRANSACTION_CONFLICT.toErrorCode()); } }
@Override public boolean advanceNextPosition() { try { if (closed || !recordReader.next(key,value)) { close(); return false; } // reset loaded flags // partition keys are already loaded, but everything else is not System.arraycopy(isPartitionColumn,0,loaded,0,isPartitionColumn.length); return true; } catch (IOException | RuntimeException e) { closeWithSuppression(e); throw new PrestoException(HIVE_CURSOR_ERROR,e); } }
@Override public void renameTable(ConnectorSession session,ConnectorTableHandle tableHandle,SchemaTableName newTableName) { if (!allowRenameTable) { throw new PrestoException(PERMISSION_DENIED,"Renaming tables is disabled in this Hive catalog"); } HiveTableHandle handle = checkType(tableHandle,HiveTableHandle.class,"tableHandle"); SchemaTableName tableName = schemaTableName(tableHandle); Optional<Table> source = metastore.getTable(handle.getSchemaName(),handle.getTableName()); if (!source.isPresent()) { throw new TableNotFoundException(tableName); } Table table = source.get(); table.setDbName(newTableName.getSchemaName()); table.setTableName(newTableName.getTableName()); metastore.alterTable(handle.getSchemaName(),handle.getTableName(),table); }
private static ScalarFunctionImplementation specializeArrayJoin(Map<String,Type> types,FunctionRegistry functionRegistry,List<Boolean> nullableArguments,Signature signature,MethodHandle methodHandle) { Type type = types.get("T"); if (type instanceof UnknownType) { return new ScalarFunctionImplementation(false,nullableArguments,methodHandle.bindTo(null).bindTo(type),true); } else { try { ScalarFunctionImplementation castFunction = functionRegistry.getScalarFunctionImplementation(internalOperator(CAST.name(),VARCHAR_TYPE_SIGNATURE,ImmutableList.of(type.getTypeSignature()))); return new ScalarFunctionImplementation(false,nullableArguments,methodHandle.bindTo(castFunction.getMethodHandle()).bindTo(type),true); } catch (PrestoException e) { throw new PrestoException(INVALID_FUNCTION_ARGUMENT,format("Input type %s not supported",type),e); } } }
public static boolean equals(MethodHandle equalsFunction,Type type,Block leftArray,Block rightArray) { if (leftArray.getPositionCount() != rightArray.getPositionCount()) { return false; } for (int i = 0; i < leftArray.getPositionCount(); i++) { checkElementNotNull(leftArray.isNull(i),ARRAY_NULL_ELEMENT_MSG); checkElementNotNull(rightArray.isNull(i),ARRAY_NULL_ELEMENT_MSG); Object leftElement = readNativeValue(type,leftArray,i); Object rightElement = readNativeValue(type,rightArray,i); try { if (!(boolean) equalsFunction.invoke(leftElement,rightElement)) { return false; } } catch (Throwable t) { Throwables.propagateIfInstanceOf(t,PrestoException.class); throw new PrestoException(INTERNAL_ERROR,t); } } return true; }
@Override public long getLong() { if (isNull()) { return 0L; } switch (fieldType) { case BYTE: return value.get(); case SHORT: return value.getShort(); case INT: return value.getInt(); case LONG: return value.getLong(); default: throw new PrestoException(KINESIS_CONVERSION_NOT_SUPPORTED,format("conversion %s to long not supported",fieldType)); } }
@Override public void commitCreateTable(JdbcOutputTableHandle handle,Collection<Slice> fragments) { StringBuilder sql = new StringBuilder() .append("ALTER TABLE ") .append(quoted(handle.getCatalogName(),handle.getSchemaName(),handle.getTemporaryTableName())) .append(" RENAME TO ") .append(quoted(handle.getCatalogName(),handle.getSchemaName(),handle.getTableName())); try (Connection connection = getConnection(handle)) { execute(connection,sql.toString()); } catch (SQLException e) { throw new PrestoException(JDBC_ERROR,e); } }
public static void input(MethodHandle methodHandle, NullableLongState state, long value)
{
    if (state.isNull()) {
        state.setNull(false);
        state.setLong(value);
        return;
    }
    try {
        if ((boolean) methodHandle.invokeExact(value, state.getLong())) {
            state.setLong(value);
        }
    }
    catch (Throwable t) {
        Throwables.propagateIfInstanceOf(t, Error.class);
        Throwables.propagateIfInstanceOf(t, PrestoException.class);
        throw new PrestoException(INTERNAL_ERROR, t);
    }
}
private void handleFailure(Throwable t)
{
    // Can not delegate to other callback while holding a lock on this
    checkNotHoldsLock();

    requestsFailed.incrementAndGet();
    requestsCompleted.incrementAndGet();

    if (t instanceof PrestoException) {
        clientCallback.clientFailed(HttpPageBufferClient.this, t);
    }

    synchronized (HttpPageBufferClient.this) {
        increaseErrorDelay();
        future = null;
        lastUpdate = DateTime.now();
    }
    clientCallback.requestComplete(HttpPageBufferClient.this);
}
public static void input(KeyValuePairsState state, Block key, Block value, int position)
{
    KeyValuePairs pairs = state.get();
    if (pairs == null) {
        pairs = new KeyValuePairs(state.getKeyType(), state.getValueType(), true);
        state.set(pairs);
    }

    long startSize = pairs.estimatedInMemorySize();
    try {
        pairs.add(key, value, position, position);
    }
    catch (ExceededMemoryLimitException e) {
        throw new PrestoException(INVALID_FUNCTION_ARGUMENT, format("The result of map_agg may not exceed %s", e.getMaxMemory()));
    }
    state.addMemoryUsage(pairs.estimatedInMemorySize() - startSize);
}
@Override
public long getLong()
{
    if (isNull()) {
        return 0L;
    }
    switch (fieldType) {
        case BYTE:
            return value.get();
        case SHORT:
            return value.getShort();
        case INT:
            return value.getInt();
        case LONG:
            return value.getLong();
        default:
            throw new PrestoException(DECODER_CONVERSION_NOT_SUPPORTED, format("conversion %s to long not supported", fieldType));
    }
}
public static void input(MethodHandle methodHandle, NullableDoubleState state, double value)
{
    if (state.isNull()) {
        state.setNull(false);
        state.setDouble(value);
        return;
    }
    try {
        if ((boolean) methodHandle.invokeExact(value, state.getDouble())) {
            state.setDouble(value);
        }
    }
    catch (Throwable t) {
        Throwables.propagateIfInstanceOf(t, Error.class);
        Throwables.propagateIfInstanceOf(t, PrestoException.class);
        throw new PrestoException(INTERNAL_ERROR, t);
    }
}
private static DateTime parseDateTimeHelper(DateTimeFormatter formatter, String datetimeString)
{
    try {
        return formatter.parseDateTime(datetimeString);
    }
    catch (IllegalArgumentException e) {
        throw new PrestoException(INVALID_FUNCTION_ARGUMENT, e);
    }
}
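For context, a minimal usage sketch of the Joda-Time API that the helper above wraps (this example is not from the original snippets; the pattern and input strings are illustrative): DateTimeFormatter parses well-formed input and throws IllegalArgumentException on malformed input, which the helper rewraps as a PrestoException.

import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;

public class ParseDateTimeExample
{
    public static void main(String[] args)
    {
        DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyy-MM-dd");

        // well-formed input parses into a DateTime
        DateTime parsed = formatter.parseDateTime("2020-01-15");
        System.out.println(parsed);

        // malformed input throws IllegalArgumentException; the helper above would
        // rewrap it as PrestoException(INVALID_FUNCTION_ARGUMENT, e)
        try {
            formatter.parseDateTime("not-a-date");
        }
        catch (IllegalArgumentException e) {
            System.out.println("caught: " + e.getMessage());
        }
    }
}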
public static long timestampPartitionKey(String value, DateTimeZone zone, String name)
{
    try {
        return parseHiveTimestamp(value, zone);
    }
    catch (IllegalArgumentException e) {
        throw new PrestoException(HIVE_INVALID_PARTITION_VALUE, format("Invalid partition value '%s' for TIMESTAMP partition key: %s", value, name));
    }
}
public static MethodHandle methodHandle(Class<?> clazz, String name, Class<?>... parameterTypes)
{
    try {
        return MethodHandles.lookup().unreflect(clazz.getMethod(name, parameterTypes));
    }
    catch (IllegalAccessException | NoSuchMethodException e) {
        throw new PrestoException(INTERNAL_ERROR, e);
    }
}
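As a quick usage sketch (not taken from Presto's sources; the call site below is illustrative), assuming the methodHandle helper above is in scope, it resolves any public method into a java.lang.invoke.MethodHandle that can then be invoked with an exact signature:

import java.lang.invoke.MethodHandle;
// assumes a static import of the methodHandle(...) helper shown above

public class MethodHandleExample
{
    public static void main(String[] args) throws Throwable
    {
        // resolve the public static method Math.max(int, int)
        MethodHandle max = methodHandle(Math.class, "max", int.class, int.class);

        // invokeExact requires the exact (int, int) -> int signature
        int result = (int) max.invokeExact(3, 7);
        System.out.println(result); // 7
    }
}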
public static Field field(Class<?> clazz, String name)
{
    try {
        return clazz.getField(name);
    }
    catch (NoSuchFieldException e) {
        throw new PrestoException(INTERNAL_ERROR, e);
    }
}
@Override
public void renameColumn(ConnectorSession session, ConnectorTableHandle tableHandle, ColumnHandle source, String target)
{
    if (!allowRenameColumn) {
        throw new PrestoException(PERMISSION_DENIED, "Renaming columns is disabled in this Hive catalog");
    }

    HiveTableHandle hiveTableHandle = checkType(tableHandle, HiveTableHandle.class, "tableHandle");
    HiveColumnHandle sourceHandle = checkType(source, HiveColumnHandle.class, "columnHandle");
    Optional<Table> tableMetadata = metastore.getTable(hiveTableHandle.getSchemaName(), hiveTableHandle.getTableName());
    if (!tableMetadata.isPresent()) {
        throw new TableNotFoundException(hiveTableHandle.getSchemaTableName());
    }
    Table table = tableMetadata.get();
    StorageDescriptor sd = table.getSd();
    ImmutableList.Builder<FieldSchema> columns = ImmutableList.builder();
    for (FieldSchema fieldSchema : sd.getCols()) {
        if (fieldSchema.getName().equals(sourceHandle.getName())) {
            columns.add(new FieldSchema(target, fieldSchema.getType(), fieldSchema.getComment()));
        }
        else {
            columns.add(fieldSchema);
        }
    }
    sd.setCols(columns.build());
    table.setSd(sd);
    metastore.alterTable(hiveTableHandle.getSchemaName(), hiveTableHandle.getTableName(), table);
}
@ScalarOperator(DIVIDE)
@SqlType(StandardTypes.DOUBLE)
public static double divide(@SqlType(StandardTypes.DOUBLE) double left, @SqlType(StandardTypes.DOUBLE) double right)
{
    try {
        return left / right;
    }
    catch (ArithmeticException e) {
        throw new PrestoException(DIVISION_BY_ZERO, e);
    }
}
@ScalarOperator(OperatorType.CAST)
@SqlType(StandardTypes.JSON)
public static Slice castVarcharToJson(@SqlType(StandardTypes.VARCHAR) Slice slice)
        throws IOException
{
    // TEMPORARY: added to ease migrating users away from cast between json and varchar
    throw new PrestoException(NOT_SUPPORTED, "`CAST (varcharValue as JSON)` is removed. Use `JSON_PARSE(varcharValue)`.");
}
private static int hexDigitCharToInt(byte b)
{
    if (b >= '0' && b <= '9') {
        return b - '0';
    }
    else if (b >= 'a' && b <= 'f') {
        return b - 'a' + 10;
    }
    else if (b >= 'A' && b <= 'F') {
        return b - 'A' + 10;
    }
    throw new PrestoException(INVALID_FUNCTION_ARGUMENT, "invalid hex character: " + (char) b);
}
@Override
public void restoreShard(UUID uuid, File target)
{
    try {
        store.restoreShard(uuid, target);
    }
    catch (UncheckedTimeoutException e) {
        throw new PrestoException(RAPTOR_BACKUP_TIMEOUT, "Shard restore timed out");
    }
}
Instance source code for com.facebook.presto.sql.tree.AddColumn
@Override
protected Void visitAddColumn(AddColumn node, Integer indent)
{
    builder.append("ALTER TABLE ")
            .append(node.getName())
            .append(" ADD COLUMN ")
            .append(node.getColumn().getName())
            .append(" ")
            .append(node.getColumn().getType());

    return null;
}
@Override
public CompletableFuture<?> execute(AddColumn statement, TransactionManager transactionManager, Metadata metadata, AccessControl accessControl, QueryStateMachine stateMachine)
{
    Session session = stateMachine.getSession();
    QualifiedObjectName tableName = createQualifiedObjectName(session, statement, statement.getName());
    Optional<TableHandle> tableHandle = metadata.getTableHandle(session, tableName);
    if (!tableHandle.isPresent()) {
        throw new SemanticException(MISSING_TABLE, statement, "Table '%s' does not exist", tableName);
    }

    accessControl.checkCanAddColumns(session.getRequiredTransactionId(), session.getIdentity(), tableName);

    Map<String, ColumnHandle> columnHandles = metadata.getColumnHandles(session, tableHandle.get());

    TableElement element = statement.getColumn();
    Type type = metadata.getType(parseTypeSignature(element.getType()));
    if ((type == null) || type.equals(UNKNOWN)) {
        throw new SemanticException(TYPE_MISMATCH, element, "Unknown type for column '%s'", element.getName());
    }
    if (columnHandles.containsKey(element.getName())) {
        throw new SemanticException(COLUMN_ALREADY_EXISTS, element, "Column '%s' already exists", element.getName());
    }

    metadata.addColumn(session, tableHandle.get(), new ColumnMetadata(element.getName(), type, false));

    return completedFuture(null);
}
@Override
public Node visitAddColumn(SqlBaseParser.AddColumnContext context)
{
    return new AddColumn(getLocation(context), getQualifiedName(context.qualifiedName()), (TableElement) visit(context.tableElement()));
}
@Test
public void testAddColumn()
        throws Exception
{
    assertStatement("ALTER TABLE foo.t ADD COLUMN c bigint",
            new AddColumn(QualifiedName.of("foo", "t"), new TableElement("c", "bigint")));
}
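To tie the parser and test snippets above together, here is a minimal sketch (not from the original snippets; the class name AddColumnParseExample is made up, and the exact parser entry point can differ between Presto versions): parse an ALTER TABLE ... ADD COLUMN statement with SqlParser and inspect the resulting AddColumn node.

import com.facebook.presto.sql.parser.SqlParser;
import com.facebook.presto.sql.tree.AddColumn;
import com.facebook.presto.sql.tree.Statement;

public class AddColumnParseExample
{
    public static void main(String[] args)
    {
        SqlParser parser = new SqlParser();
        Statement statement = parser.createStatement("ALTER TABLE foo.t ADD COLUMN c bigint");

        // for this statement the parser produces an AddColumn AST node
        AddColumn addColumn = (AddColumn) statement;
        System.out.println(addColumn.getName());              // foo.t
        System.out.println(addColumn.getColumn().getName());  // c
        System.out.println(addColumn.getColumn().getType());  // bigint
    }
}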
Instance source code for com.facebook.presto.sql.tree.AliasedRelation
@Override
protected List<QuerySource> visitRelation(Relation node, QueryState state)
{
    if (node instanceof Join) {
        return node.accept(this, state);
    }
    else if (node instanceof SampledRelation) {
        state.addException("Sampled relations are not supported");
        return null;
    }
    else if (node instanceof AliasedRelation) {
        AliasedRelation ar = (AliasedRelation) node;
        state.setKeyValue("table_alias", ar.getAlias());
        List<QuerySource> relations = ar.getRelation().accept(this, state);
        for (QuerySource rr : relations) {
            rr.setAlias(ar.getAlias());
        }
        return relations;
    }
    else if (node instanceof QueryBody) {
        return node.accept(this, state);
    }
    else {
        state.addException("Unable to parse node because it has an unknown type: " + node.getClass());
        return null;
    }
}
@Override
protected RelationType visitAliasedRelation(AliasedRelation relation, AnalysisContext context)
{
    RelationType child = process(relation.getRelation(), context);

    // todo this check should be inside of TupleDescriptor.withAlias, but the exception needs the node object
    if (relation.getColumnNames() != null) {
        int totalColumns = child.getVisibleFieldCount();
        if (totalColumns != relation.getColumnNames().size()) {
            throw new SemanticException(MISMATCHED_COLUMN_ALIASES, relation, "Column alias list has %s entries but '%s' has %s columns available", relation.getColumnNames().size(), relation.getAlias(), totalColumns);
        }
    }

    RelationType descriptor = child.withAlias(relation.getAlias(), relation.getColumnNames());

    analysis.setOutputDescriptor(relation, descriptor);
    return descriptor;
}
@Override
protected Void visitAliasedRelation(AliasedRelation node, Integer indent)
{
    process(node.getRelation(), indent);

    builder.append(' ')
            .append(formatName(node.getAlias()));
    appendAliasColumns(builder, node.getColumnNames());

    return null;
}
@Override
protected RelationPlan visitAliasedRelation(AliasedRelation node, Void context)
{
    RelationPlan subPlan = process(node.getRelation(), context);

    RelationType outputDescriptor = analysis.getOutputDescriptor(node);

    return new RelationPlan(subPlan.getRoot(), outputDescriptor, subPlan.getOutputSymbols(), subPlan.getSampleWeight());
}
@Override
protected Void visitAliasedRelation(AliasedRelation node, Integer indent)
{
    process(node.getRelation(), indent);

    builder.append(' ')
            .append(node.getAlias());
    appendAliasColumns(builder, node.getColumnNames());

    return null;
}
@Override
public Node visitAliasedRelation(SqlBaseParser.AliasedRelationContext context)
{
    Relation child = (Relation) visit(context.relationPrimary());

    if (context.identifier() == null) {
        return child;
    }

    return new AliasedRelation(getLocation(context), child, context.identifier().getText(), getColumnAliases(context.columnAliases()));
}
public static Relation aliased(Relation relation, String alias, List<String> columnAliases)
{
    return new AliasedRelation(relation, alias, columnAliases);
}
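As a final sketch (not part of the original snippets; the table name "orders", the alias "o", and the column aliases are illustrative), an AliasedRelation can also be built directly around a Table node, mirroring the aliased(...) helper above:

import com.facebook.presto.sql.tree.AliasedRelation;
import com.facebook.presto.sql.tree.QualifiedName;
import com.facebook.presto.sql.tree.Relation;
import com.facebook.presto.sql.tree.Table;
import com.google.common.collect.ImmutableList;

public class AliasedRelationExample
{
    public static void main(String[] args)
    {
        // wrap a plain table reference in an alias plus column aliases
        Relation orders = new Table(QualifiedName.of("orders"));
        AliasedRelation aliased = new AliasedRelation(orders, "o", ImmutableList.of("id", "total"));

        System.out.println(aliased.getAlias());        // o
        System.out.println(aliased.getColumnNames());  // [id, total]
        System.out.println(aliased.getRelation());     // the underlying Table node
    }
}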
That concludes today's look at instance source code for com.facebook.presto.sql.tree.Expression. Thank you for reading. For more on instance source code for com.facebook.presto.Session, com.facebook.presto.spi.PrestoException, com.facebook.presto.sql.tree.AddColumn, and com.facebook.presto.sql.tree.AliasedRelation, you can search this site.