Java source code examples: org.apache.hadoop.hive.ql.parse.HiveParser
Example 1
private void parseQL(ASTNode ast) {
  switch (ast.getType()) {
    case HiveParser.TOK_QUERY:
      visitSubtree(ast);
      break;
    case HiveParser.TOK_UPDATE_TABLE:
      setOperation("UPDATE");
      visitSubtree(ast);
      break;
    case HiveParser.TOK_DELETE_FROM:
      setOperation("DELETE FROM");
      visitSubtree(ast);
      break;
    default:
      LOG.error("Unsupported query operation " + ast.getType());
      throw new IllegalStateException("Query operation is not supported "
          + ast.getType());
  }
}
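For context, a minimal driver sketch showing how a walker like this is typically fed. It assumes Hive 1.x/2.x, where ParseDriver.parse(String) returns the root ASTNode (the signature differs in later Hive versions); the analyze method name is hypothetical, and parseQL is the method above.

import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.ParseDriver;
import org.apache.hadoop.hive.ql.parse.ParseException;

// Hypothetical entry point: parse a HiveQL string and dispatch the root node.
public void analyze(String query) throws ParseException {
  ParseDriver pd = new ParseDriver();
  ASTNode root = pd.parse(query);
  // The parser wraps the real root in "nil" nodes; descend to the first node
  // that carries a token before dispatching (cf. ParseUtils.findRootNonNullToken).
  while (root.getToken() == null && root.getChildCount() > 0) {
    root = (ASTNode) root.getChild(0);
  }
  parseQL(root);
}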
Example 2
private void visitSubtree(ASTNode ast) {
  int len = ast.getChildCount();
  if (len > 0) {
    for (Node n : ast.getChildren()) {
      ASTNode asn = (ASTNode) n;
      switch (asn.getToken().getType()) {
        case HiveParser.TOK_TABNAME:
          tableSet.add(ast.getChild(0).getChild(0).getText());
          break;
        case HiveParser.TOK_SET_COLUMNS_CLAUSE:
          for (int i = 0; i < asn.getChildCount(); i++) {
            addToColumnSet((ASTNode) asn.getChild(i).getChild(0));
          }
          break;
        case HiveParser.TOK_FROM:
          parseFromClause((ASTNode) asn.getChild(0));
          break;
        case HiveParser.TOK_INSERT:
          for (int i = 0; i < asn.getChildCount(); i++) {
            parseInsertClause((ASTNode) asn.getChild(i));
          }
          break;
        case HiveParser.TOK_UNIONTYPE:
          int childcount = asn.getChildCount();
          for (int i = 0; i < childcount; i++) {
            parseQL((ASTNode) asn.getChild(i));
          }
          break;
      }
    }
    // Add tableSet and columnSet to tableColumnMap
    addTablesColumnsToMap(tableSet, columnSet);
  }
}
Example 3
private void parseSubQuery(ASTNode subQuery) {
  int cc = 0;
  int cp = 0;
  switch (subQuery.getToken().getType()) {
    case HiveParser.TOK_QUERY:
      visitSubtree(subQuery);
      break;
    case HiveParser.TOK_UNIONTYPE:
      cc = subQuery.getChildCount();
      for (cp = 0; cp < cc; ++cp) {
        parseSubQuery((ASTNode) subQuery.getChild(cp));
      }
      break;
  }
}
Example 4
private void parseQueryClause(ASTNode ast) {
  int len = ast.getChildCount();
  if (len > 0) {
    for (Node n : ast.getChildren()) {
      ASTNode asn = (ASTNode) n;
      switch (asn.getToken().getType()) {
        case HiveParser.TOK_FROM:
          parseFromClause((ASTNode) asn.getChild(0));
          break;
        case HiveParser.TOK_INSERT:
          for (int i = 0; i < asn.getChildCount(); i++) {
            parseInsertClause((ASTNode) asn.getChild(i));
          }
          break;
      }
    }
  }
}
Example 5
private void parseTokFunction(ASTNode ast, Set<String> set) {
  switch (ast.getType()) {
    case HiveParser.TOK_TABLE_OR_COL:
      String colRealName = convAliasToReal(columnAliasMap, ast.getChild(0).getText());
      set.add(colRealName);
      break;
    case HiveParser.TOK_FUNCTION:
      for (int i = 0; i < ast.getChildCount(); i++) {
        ASTNode n = (ASTNode) ast.getChild(i);
        if (n != null) {
          parseTokFunction(n, set);
        }
      }
      break;
  }
}
Example 6
@Override
public Task<? extends Serializable> createShowRoleGrantTask(ASTNode ast, Path resultFile,
    HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException {
  ASTNode child = (ASTNode) ast.getChild(0);
  PrincipalType principalType = PrincipalType.USER;
  switch (child.getType()) {
    case HiveParser.TOK_USER:
      principalType = PrincipalType.USER;
      break;
    case HiveParser.TOK_GROUP:
      principalType = PrincipalType.GROUP;
      break;
    case HiveParser.TOK_ROLE:
      principalType = PrincipalType.ROLE;
      break;
  }
  if (principalType != PrincipalType.GROUP) {
    String msg = SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL + principalType;
    throw new SemanticException(msg);
  }
  String principalName = BaseSemanticAnalyzer.unescapeIdentifier(child.getChild(0).getText());
  RoleDDLDesc roleDesc = new RoleDDLDesc(principalName, principalType,
      RoleDDLDesc.RoleOperation.SHOW_ROLE_GRANT, null);
  roleDesc.setResFile(resultFile.toString());
  return createTask(new DDLWork(inputs, outputs, roleDesc));
}
Example 7
@Test
public void typical() throws Exception {
  ArrayList<Node> children = new ArrayList<>();
  children.add(dbNode);
  children.add(tableNode);
  when(node.getChildren()).thenReturn(children);
  when(node.getChildCount()).thenReturn(children.size());
  when(node.getChild(0)).thenReturn(dbNode);
  when(node.getChild(1)).thenReturn(tableNode);
  when(node.getType()).thenReturn(HiveParser.TOK_TABNAME);
  when(token.getText()).thenReturn("TOK_TABNAME");
  processor.process(node, null, null);
  assertThat(processor.getTables(), is(Arrays.asList(DATABASE + "." + TABLE)));
}
Example 8
@Test
public void unqualifiedTableName() throws Exception {
  ArrayList<Node> children = new ArrayList<>();
  children.add(tableNode);
  when(node.getChildren()).thenReturn(children);
  when(node.getChildCount()).thenReturn(children.size());
  when(node.getChild(0)).thenReturn(tableNode);
  when(node.getType()).thenReturn(HiveParser.TOK_TABNAME);
  when(token.getText()).thenReturn("TOK_TABNAME");
  when(SessionState.get()).thenReturn(sessionState);
  processor.process(node, null, null);
  assertThat(processor.getTables(), is(Arrays.asList(DEFAULT_DATABASE + "." + TABLE)));
}
Example 9
@Test(expected = NullPointerException.class)
public void unqualifiedTableNameFailsIfSessionIsNotSet() throws Exception {
  ArrayList<Node> children = new ArrayList<>();
  children.add(tableNode);
  when(node.getChildren()).thenReturn(children);
  when(node.getChildCount()).thenReturn(children.size());
  when(node.getChild(0)).thenReturn(tableNode);
  when(node.getType()).thenReturn(HiveParser.TOK_TABNAME);
  when(token.getText()).thenReturn("TOK_TABNAME");
  processor.process(node, null, null);
}
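Examples 7-9 rely on mocks and constants defined elsewhere in their test class. A minimal sketch of such a fixture follows; every name in it (TableProcessorTest, TableProcessor, DATABASE, TABLE, DEFAULT_DATABASE) is inferred from usage in the tests and is therefore an assumption, not the original class. Stubbing the static SessionState.get() in Example 8 additionally assumes a static-mocking facility such as PowerMockito.mockStatic(SessionState.class), which is omitted here.

import static org.mockito.Mockito.when;

import org.antlr.runtime.Token;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.junit.Before;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;

// Hypothetical fixture assumed by Examples 7-9; names inferred from usage.
@RunWith(MockitoJUnitRunner.class)
public class TableProcessorTest {

  private static final String DATABASE = "db";
  private static final String TABLE = "tbl";
  private static final String DEFAULT_DATABASE = "default";

  @Mock private ASTNode node;      // the TOK_TABNAME node under test
  @Mock private ASTNode dbNode;    // child carrying the database name
  @Mock private ASTNode tableNode; // child carrying the table name
  @Mock private Token token;
  @Mock private SessionState sessionState;

  private final TableProcessor processor = new TableProcessor();

  @Before
  public void setUp() {
    when(node.getToken()).thenReturn(token);
    when(dbNode.getText()).thenReturn(DATABASE);
    when(tableNode.getText()).thenReturn(TABLE);
    when(sessionState.getCurrentDatabase()).thenReturn(DEFAULT_DATABASE);
  }
}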
Example 10
private void parseTokFunction(ASTNode ast, Set<String> set) {
  if (ast.getType() == HiveParser.TOK_TABLE_OR_COL) {
    String colRealName = convAliasToReal(columnAliasMap, ast.getChild(0).getText());
    set.add(colRealName);
  }
  for (int i = 0; i < ast.getChildCount(); i++) {
    ASTNode n = (ASTNode) ast.getChild(i);
    if (n != null) {
      parseTokFunction(n, set);
    }
  }
}
Example 11
private void parseQL(ASTNode ast) {
  switch (ast.getType()) {
    case HiveParser.TOK_QUERY:
      parseQueryClause(ast);
      addTablesColumnsToMap(tableSet, columnSet);
      break;
    case HiveParser.TOK_UPDATE_TABLE:
      setOperation("UPDATE");
      visitSubtree(ast);
      break;
    case HiveParser.TOK_DELETE_FROM:
      setOperation("DELETE");
      visitSubtree(ast);
      break;
    case HiveParser.TOK_CREATETABLE:
      setOperation("CREATE");
      visitSubtree(ast);
      break;
    case HiveParser.TOK_DROPTABLE:
      setOperation("DROP");
      visitSubtree(ast);
      break;
    case HiveParser.TOK_ALTERTABLE:
      setOperation("ALTER");
      visitSubtree(ast);
      break;
    default:
      LOG.error("Unsupported query operation " + ast.getText());
      throw new IllegalStateException("Query operation is not supported "
          + ast.getText());
  }
}
Example 12
private void visitSubtree(ASTNode ast) {
  int len = ast.getChildCount();
  if (len > 0) {
    for (Node n : ast.getChildren()) {
      ASTNode asn = (ASTNode) n;
      switch (asn.getToken().getType()) {
        case HiveParser.TOK_TABNAME:
          //tableSet.add(ast.getChild(0).getChild(0).getText());
          parserContent.getTableColumnMap().put(ast.getChild(0).getChild(0).getText(), new HashSet<>(columnSet));
          break;
        case HiveParser.TOK_SET_COLUMNS_CLAUSE:
          for (int i = 0; i < asn.getChildCount(); i++) {
            addToColumnSet((ASTNode) asn.getChild(i).getChild(0));
          }
          break;
        case HiveParser.TOK_QUERY:
          parseQueryClause(asn);
          break;
        case HiveParser.TOK_UNIONTYPE:
        case HiveParser.TOK_UNIONALL:
        case HiveParser.TOK_UNIONDISTINCT:
          visitSubtree(asn);
          break;
      }
    }
    // Add tableSet and columnSet to tableColumnMap
    addTablesColumnsToMap(tableSet, columnSet);
  }
}
Example 13
@Override
public Task<? extends Serializable> createGrantTask(ASTNode ast, HashSet<ReadEntity> inputs,
    HashSet<WriteEntity> outputs) throws SemanticException {
  List<PrivilegeDesc> privilegeDesc = analyzePrivilegeListDef(
      (ASTNode) ast.getChild(0));
  List<PrincipalDesc> principalDesc = analyzePrincipalListDef(
      (ASTNode) ast.getChild(1));
  SentryHivePrivilegeObjectDesc privilegeObj = null;
  boolean grantOption = false;
  if (ast.getChildCount() > 2) {
    for (int i = 2; i < ast.getChildCount(); i++) {
      ASTNode astChild = (ASTNode) ast.getChild(i);
      if (astChild.getType() == HiveParser.TOK_GRANT_WITH_OPTION) {
        grantOption = true;
      } else if (astChild.getType() == HiveParser.TOK_PRIV_OBJECT) {
        privilegeObj = analyzePrivilegeObject(astChild);
      }
    }
  }
  String userName = null;
  if (SessionState.get() != null
      && SessionState.get().getAuthenticator() != null) {
    userName = SessionState.get().getAuthenticator().getUserName();
  }
  Preconditions.checkNotNull(privilegeObj, "privilegeObj is null for " + ast.dump());
  if (privilegeObj.getPartSpec() != null) {
    throw new SemanticException(SentryHiveConstants.PARTITION_PRIVS_NOT_SUPPORTED);
  }
  for (PrincipalDesc princ : principalDesc) {
    if (princ.getType() != PrincipalType.ROLE) {
      String msg = SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL + princ.getType();
      throw new SemanticException(msg);
    }
  }
  GrantDesc grantDesc = new GrantDesc(privilegeObj, privilegeDesc,
      principalDesc, userName, PrincipalType.USER, grantOption);
  return createTask(new DDLWork(inputs, outputs, grantDesc));
}
Example 14
@Override
public Task<? extends Serializable> createShowGrantTask(ASTNode ast, Path resultFile, HashSet<ReadEntity> inputs,
    HashSet<WriteEntity> outputs) throws SemanticException {
  SentryHivePrivilegeObjectDesc privHiveObj = null;
  ASTNode principal = (ASTNode) ast.getChild(0);
  PrincipalType type = PrincipalType.USER;
  switch (principal.getType()) {
    case HiveParser.TOK_USER:
      type = PrincipalType.USER;
      break;
    case HiveParser.TOK_GROUP:
      type = PrincipalType.GROUP;
      break;
    case HiveParser.TOK_ROLE:
      type = PrincipalType.ROLE;
      break;
  }
  if (type != PrincipalType.ROLE) {
    String msg = SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL + type;
    throw new SemanticException(msg);
  }
  String principalName = BaseSemanticAnalyzer.unescapeIdentifier(principal.getChild(0).getText());
  PrincipalDesc principalDesc = new PrincipalDesc(principalName, type);
  // Partition privileges are not supported by Sentry
  if (ast.getChildCount() > 1) {
    ASTNode child = (ASTNode) ast.getChild(1);
    if (child.getToken().getType() == HiveParser.TOK_PRIV_OBJECT_COL) {
      privHiveObj = analyzePrivilegeObject(child);
    } else {
      throw new SemanticException("Unrecognized Token: " + child.getToken().getType());
    }
  }
  ShowGrantDesc showGrant = new ShowGrantDesc(resultFile.toString(),
      principalDesc, privHiveObj);
  return createTask(new DDLWork(inputs, outputs, showGrant));
}
Example 15
private List<PrincipalDesc> analyzePrincipalListDef(ASTNode node) {
  List<PrincipalDesc> principalList = new ArrayList<PrincipalDesc>();
  for (int i = 0; i < node.getChildCount(); i++) {
    ASTNode child = (ASTNode) node.getChild(i);
    PrincipalType type = null;
    // The numeric labels are hard-coded token type values; they appear to
    // mirror TOK_USER/TOK_GROUP/TOK_ROLE from a different HiveParser version.
    switch (child.getType()) {
      case 880:
      case HiveParser.TOK_USER:
        type = PrincipalType.USER;
        break;
      case 685:
      case HiveParser.TOK_GROUP:
        type = PrincipalType.GROUP;
        break;
      case 782:
      case HiveParser.TOK_ROLE:
        type = PrincipalType.ROLE;
        break;
    }
    String principalName = BaseSemanticAnalyzer.unescapeIdentifier(child.getChild(0).getText());
    PrincipalDesc principalDesc = new PrincipalDesc(principalName, type);
    LOG.debug("## Principal : [ " + principalName + ", " + type + "]");
    principalList.add(principalDesc);
  }
  return principalList;
}
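Because those literal token type values can drift between Hive releases, one alternative is to dispatch on the token text instead. The helper below is a hypothetical sketch, not part of the original code:

// Hypothetical helper: resolve the principal type from the token's text,
// which stays "TOK_USER"/"TOK_GROUP"/"TOK_ROLE" across HiveParser versions
// even when the numeric token type values shift.
private static PrincipalType toPrincipalType(ASTNode child) {
  switch (child.getText()) {
    case "TOK_USER":
      return PrincipalType.USER;
    case "TOK_GROUP":
      return PrincipalType.GROUP;
    case "TOK_ROLE":
      return PrincipalType.ROLE;
    default:
      return null;
  }
}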
Example 16
@VisibleForTesting
protected static AccessURI extractPartition(ASTNode ast) throws SemanticException {
  for (int i = 0; i < ast.getChildCount(); i++) {
    ASTNode child = (ASTNode) ast.getChild(i);
    if (child.getToken().getType() == HiveParser.TOK_PARTITIONLOCATION &&
        child.getChildCount() == 1) {
      return parseURI(BaseSemanticAnalyzer.
          unescapeSQLString(child.getChild(0).getText()));
    }
  }
  return null;
}