Java Code Examples: org.apache.zeppelin.interpreter.InterpreterOutput

The following examples show how org.apache.zeppelin.interpreter.InterpreterOutput is used in the Apache Zeppelin code base and its interpreter tests.

Example 1: H2 database setup for a JDBC interpreter test
@Before
public void setUp() throws Exception {
  Class.forName("org.h2.Driver");
  Connection connection = DriverManager.getConnection(getJdbcConnection());
  Statement statement = connection.createStatement();
  statement.execute(
      "DROP TABLE IF EXISTS test_table; " +
      "CREATE TABLE test_table(id varchar(255), name varchar(255));");

  PreparedStatement insertStatement = connection.prepareStatement(
          "insert into test_table(id, name) values ('a', 'a_name'),('b', 'b_name'),('c', ?);");
  insertStatement.setString(1, null);
  insertStatement.execute();
  context = InterpreterContext.builder()
      .setAuthenticationInfo(new AuthenticationInfo("testUser"))
      .setParagraphId("paragraphId")
      .setInterpreterOut(new InterpreterOutput(null))
      .build();
}
 
Example 2: converting a RemoteInterpreterContext into an InterpreterContext
private InterpreterContext convert(RemoteInterpreterContext ric, InterpreterOutput output) {
  return InterpreterContext.builder()
      .setNoteId(ric.getNoteId())
      .setNoteName(ric.getNoteName())
      .setParagraphId(ric.getParagraphId())
      .setReplName(ric.getReplName())
      .setParagraphTitle(ric.getParagraphTitle())
      .setParagraphText(ric.getParagraphText())
      .setLocalProperties(ric.getLocalProperties())
      .setAuthenticationInfo(AuthenticationInfo.fromJson(ric.getAuthenticationInfo()))
      .setGUI(GUI.fromJson(ric.getGui()))
      .setConfig(gson.fromJson(ric.getConfig(),
                 new TypeToken<Map<String, Object>>() {}.getType()))
      .setNoteGUI(GUI.fromJson(ric.getNoteGui()))
      .setAngularObjectRegistry(interpreterGroup.getAngularObjectRegistry())
      .setResourcePool(interpreterGroup.getResourcePool())
      .setInterpreterOut(output)
      .setIntpEventClient(intpEventClient)
      .setProgressMap(progressMap)
      .build();
}
 
Example 3: PythonInterpreter test setup with the test class as output listener
@Before
public void setUp() throws Exception {
  Properties p = new Properties();
  p.setProperty("zeppelin.python", "python");
  p.setProperty("zeppelin.python.maxResult", "100");
  p.setProperty("zeppelin.python.useIPython", "false");

  intpGroup = new InterpreterGroup();

  python = new PythonInterpreter(p);
  python.setInterpreterGroup(intpGroup);

  List<Interpreter> interpreters = new LinkedList<>();
  interpreters.add(python);
  intpGroup.put("note", interpreters);

  out = new InterpreterOutput(this);

  context = InterpreterContext.builder()
      .setInterpreterOut(out)
      .setAngularObjectRegistry(new AngularObjectRegistry(intpGroup.getId(), null))
      .build();
  InterpreterContext.set(context);

  python.open();
}
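
In Example 3 the test class passes itself to new InterpreterOutput(this), which implies it implements Zeppelin's InterpreterOutputListener callback interface. Below is a minimal sketch of such a listener, assuming the onUpdateAll/onAppend/onUpdate signatures found in the Zeppelin source; LoggingOutputListener is a hypothetical name used for illustration.

import org.apache.zeppelin.interpreter.InterpreterOutput;
import org.apache.zeppelin.interpreter.InterpreterOutputListener;
import org.apache.zeppelin.interpreter.InterpreterResultMessageOutput;

// Hypothetical listener that mirrors appended output lines to stdout.
public class LoggingOutputListener implements InterpreterOutputListener {
  @Override
  public void onUpdateAll(InterpreterOutput out) {
    // The whole output was replaced (e.g. after a clear()); nothing to do here.
  }

  @Override
  public void onAppend(int index, InterpreterResultMessageOutput out, byte[] line) {
    // Invoked for each line appended to result message `index`.
    System.out.print("[msg " + index + "] " + new String(line));
  }

  @Override
  public void onUpdate(int index, InterpreterResultMessageOutput out) {
    // Result message `index` was rewritten in place.
  }
}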
 
Example 4: PythonDockerInterpreter test setup with Mockito stubs
@Before
public void setUp() throws InterpreterException {
  docker = spy(new PythonDockerInterpreter(new Properties()));
  python = mock(PythonInterpreter.class);

  InterpreterGroup group = new InterpreterGroup();
  group.put("note", Arrays.asList(python, docker));
  python.setInterpreterGroup(group);
  docker.setInterpreterGroup(group);

  doReturn(true).when(docker).pull(any(InterpreterOutput.class), anyString());
  doReturn(new File("/scriptpath")).when(python).getPythonWorkDir();
  doReturn(PythonDockerInterpreter.class.getName()).when(docker).getClassName();
  doReturn(PythonInterpreter.class.getName()).when(python).getClassName();
  docker.open();
}
 
Example 5: redirecting Scala Console output into an InterpreterOutput
@Override
public InterpreterResult interpret(String st, InterpreterContext context)
    throws InterpreterException {

  if (isSparkVersionUnsupported()) {
    return unsupportedMessage;
  }

  z.setInterpreterContext(context);
  z.setGui(context.getGui());
  z.setNoteGui(context.getNoteGui());
  InterpreterContext.set(context);

  jsc.setJobGroup(buildJobGroupId(context), buildJobDesc(context), false);
  jsc.setLocalProperty("spark.scheduler.pool", context.getLocalProperties().get("pool"));

  InterpreterOutput out = context.out;
  PrintStream scalaOut = Console.out();
  PrintStream newOut = (out != null) ? new PrintStream(out) : null;

  Console.setOut(newOut);
  InterpreterResult result = interpreter.interpret(st, context);
  Console.setOut(scalaOut);

  return result;
}
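
Note that Example 5 swaps the Scala Console stream without a finally block, so an exception in interpret() would leave Console pointing at the paragraph's stream. A hedged sketch of the same redirection with a guaranteed restore, assuming the same interpreter, st and context as above:

  InterpreterOutput out = context.out;
  PrintStream scalaOut = Console.out();
  PrintStream newOut = (out != null) ? new PrintStream(out) : null;
  try {
    Console.setOut(newOut);
    return interpreter.interpret(st, context);
  } finally {
    // Restore the original stream even if interpretation fails.
    Console.setOut(scalaOut);
  }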
 
Example 6: PySpark interpreter test class setup
@BeforeClass
public static void setUp() throws Exception {
  intpGroup = new InterpreterGroup();
  intpGroup.put("note", new LinkedList<Interpreter>());
  context = InterpreterContext.builder()
      .setNoteId("note")
      .setInterpreterOut(new InterpreterOutput(null))
      .setIntpEventClient(mock(RemoteInterpreterEventClient.class))
      .setAngularObjectRegistry(new AngularObjectRegistry(intpGroup.getId(), null))
      .build();
  InterpreterContext.set(context);

  sparkInterpreter = new SparkInterpreter(getPySparkTestProperties());
  intpGroup.get("note").add(sparkInterpreter);
  sparkInterpreter.setInterpreterGroup(intpGroup);
  sparkInterpreter.open();

  pyspark = new AltPySparkInterpreter(getPySparkTestProperties());
  intpGroup.get("note").add(pyspark);
  pyspark.setInterpreterGroup(intpGroup);
  pyspark.open();
}
 
Example 7: asserting an HTML-templated single-row SQL result
@Test
public void testSingleRowResult() throws InterpreterException {
  sparkInterpreter.interpret("case class P(age:Int)", context);
  sparkInterpreter.interpret(
          "val gr = sc.parallelize(Seq(P(1),P(2),P(3),P(4),P(5),P(6),P(7),P(8),P(9),P(10)))",
          context);
  sparkInterpreter.interpret("gr.toDF.registerTempTable(\"gr\")", context);

  context = InterpreterContext.builder()
          .setNoteId("noteId")
          .setParagraphId("paragraphId")
          .setParagraphTitle("title")
          .setAngularObjectRegistry(new AngularObjectRegistry(intpGroup.getId(), null))
          .setResourcePool(new LocalResourcePool("id"))
          .setInterpreterOut(new InterpreterOutput(null))
          .setIntpEventClient(mock(RemoteInterpreterEventClient.class))
          .build();
  context.getLocalProperties().put("template", "Total count: <h1>{0}</h1>, Total age: <h1>{1}</h1>");

  InterpreterResult ret = sqlInterpreter.interpret("select count(1), sum(age) from gr", context);
  context.getLocalProperties().remove("template");
  assertEquals(InterpreterResult.Code.SUCCESS, ret.code());
  assertEquals(Type.HTML, ret.message().get(0).getType());
  assertEquals("Total count: <h1>10</h1>, Total age: <h1>55</h1>", ret.message().get(0).getData());
}
 
Example 8: lazily building a default InterpreterContext
protected InterpreterContext getIntpContext() {
  if (this.interpreterContext == null) {
    this.interpreterContext = InterpreterContext.builder()
            .setInterpreterOut(new InterpreterOutput(null))
            .build();
    InterpreterContext.set(this.interpreterContext);
  }
  return this.interpreterContext;
}
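
Most of the contexts on this page wrap a listener-less InterpreterOutput(null), which simply buffers whatever the interpreter writes. A minimal round-trip sketch, assuming the write/setType/flush/toInterpreterResultMessage methods that appear in the other examples on this page (setType as in Examples 21 and 22, toInterpreterResultMessage as in the JDBC tests below):

import java.io.IOException;
import java.util.List;
import org.apache.zeppelin.interpreter.InterpreterOutput;
import org.apache.zeppelin.interpreter.InterpreterResult;
import org.apache.zeppelin.interpreter.InterpreterResultMessage;

public class OutputRoundTrip {
  public static void main(String[] args) throws IOException {
    InterpreterOutput out = new InterpreterOutput(null); // no listener, buffer only
    out.setType(InterpreterResult.Type.TEXT);
    out.write("hello from the interpreter\n");
    out.flush();

    // Read back what was buffered, as the JDBC tests below do via context.out.
    List<InterpreterResultMessage> messages = out.toInterpreterResultMessage();
    System.out.println(messages.get(0).getType()); // TEXT
    System.out.println(messages.get(0).getData()); // hello from the interpreter
  }
}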
 
Example 9: detecting the Spark version behind a Livy session
@Override
public void open() throws InterpreterException {
  this.sparkInterpreter = getInterpreterInTheSameSessionByClassName(LivySparkInterpreter.class);
  // Since we don't know whether the Livy server runs Spark 1 or Spark 2,
  // detect SparkSession to determine whether Spark 2 is in use.
  try {
    InterpreterContext context = InterpreterContext.builder()
        .setInterpreterOut(new InterpreterOutput(null))
        .build();
    InterpreterResult result = sparkInterpreter.interpret("spark", context);
    if (result.code() == InterpreterResult.Code.SUCCESS &&
        result.message().get(0).getData().contains("org.apache.spark.sql.SparkSession")) {
      LOGGER.info("SparkSession is detected so we are using spark 2.x for session {}",
          sparkInterpreter.getSessionInfo().id);
      isSpark2 = true;
    } else {
      // spark 1.x
      result = sparkInterpreter.interpret("sqlContext", context);
      if (result.code() == InterpreterResult.Code.SUCCESS) {
        LOGGER.info("sqlContext is detected.");
      } else if (result.code() == InterpreterResult.Code.ERROR) {
        // Create the SQLContext ourselves if it is not available; Livy 0.2
        // does not provide sqlContext out of the box.
        LOGGER.info("sqlContext is not detected, try to create SQLContext by ourselves");
        result = sparkInterpreter.interpret(
            "val sqlContext = new org.apache.spark.sql.SQLContext(sc)\n"
                + "import sqlContext.implicits._", context);
        if (result.code() == InterpreterResult.Code.ERROR) {
          throw new LivyException("Failed to create SQLContext, " +
              result.message().get(0).getData());
        }
      }
    }
  } catch (LivyException e) {
    throw new RuntimeException("Failed to detect Spark version", e);
  }
}
 
Example 10: Neo4j interpreter test setup
@Before
public void setUpZeppelin() {
  Properties p = new Properties();
  p.setProperty(Neo4jConnectionManager.NEO4J_SERVER_URL, server.boltURI().toString());
  p.setProperty(Neo4jConnectionManager.NEO4J_AUTH_TYPE, Neo4jAuthType.NONE.toString());
  p.setProperty(Neo4jConnectionManager.NEO4J_MAX_CONCURRENCY, "50");
  interpreter = new Neo4jCypherInterpreter(p);
  context = InterpreterContext.builder()
      .setInterpreterOut(new InterpreterOutput(null))
      .build();
}
 
Example 11: InterpreterContext factory with a resource pool
private InterpreterContext getInterpreterContext() {
  return InterpreterContext.builder()
          .setParagraphId("paragraph_1")
          .setAuthenticationInfo(new AuthenticationInfo("testUser"))
          .setResourcePool(resourcePool)
          .setInterpreterOut(new InterpreterOutput(null))
          .build();
}
 
Example 12: per-user JDBC credentials in a multi-tenant test
@Test
public void testMultiTenant_2() throws IOException, InterpreterException {
  // user1 %hive  select from default db
  // user2 %hive  select from default db
  Properties properties = getDBProperty("default", "", "");
  JDBCInterpreter jdbc = new JDBCInterpreter(properties);
  AuthenticationInfo user1Credential = getUserAuth("user1", "hive", "user1Id", "user1Pw");
  AuthenticationInfo user2Credential = getUserAuth("user2", "hive", "user2Id", "user2Pw");
  jdbc.open();

  // user1 runs default
  InterpreterContext context = InterpreterContext.builder()
          .setAuthenticationInfo(user1Credential)
          .setInterpreterOut(new InterpreterOutput(null))
          .setReplName("hive")
          .build();
  jdbc.interpret("", context);

  JDBCUserConfigurations user1JDBC1Conf = jdbc.getJDBCConfiguration("user1");
  assertEquals("user1Id", user1JDBC1Conf.getPropertyMap("default").get("user"));
  assertEquals("user1Pw", user1JDBC1Conf.getPropertyMap("default").get("password"));

  // user2 runs default
  context = InterpreterContext.builder()
          .setAuthenticationInfo(user2Credential)
          .setInterpreterOut(new InterpreterOutput(null))
          .setReplName("hive")
          .build();
  jdbc.interpret("", context);

  JDBCUserConfigurations user2JDBC1Conf = jdbc.getJDBCConfiguration("user2");
  assertEquals("user2Id", user2JDBC1Conf.getPropertyMap("default").get("user"));
  assertEquals("user2Pw", user2JDBC1Conf.getPropertyMap("default").get("password"));

  jdbc.close();
}
 
Example 13: precode execution under a non-default property prefix
@Test
public void testPrecodeWithAnotherPrefix() throws IOException,
        InterpreterException {
  Properties properties = new Properties();
  properties.setProperty("anotherPrefix.driver", "org.h2.Driver");
  properties.setProperty("anotherPrefix.url", getJdbcConnection());
  properties.setProperty("anotherPrefix.user", "");
  properties.setProperty("anotherPrefix.password", "");
  properties.setProperty(String.format(PRECODE_KEY_TEMPLATE, "anotherPrefix"),
          "create table test_precode_2 (id int); insert into test_precode_2 values (2);");
  JDBCInterpreter jdbcInterpreter = new JDBCInterpreter(properties);
  jdbcInterpreter.open();

  Map<String, String> localProperties = new HashMap<>();
  localProperties.put("db", "anotherPrefix");
  InterpreterContext context = InterpreterContext.builder()
      .setAuthenticationInfo(new AuthenticationInfo("testUser"))
      .setInterpreterOut(new InterpreterOutput(null))
      .setLocalProperties(localProperties)
      .build();
  jdbcInterpreter.executePrecode(context);

  String sqlQuery = "select * from test_precode_2";

  InterpreterResult interpreterResult = jdbcInterpreter.interpret(sqlQuery, context);

  List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage();
  assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code());

  assertEquals(3, resultMessages.size());
  assertEquals(InterpreterResult.Type.TEXT, resultMessages.get(0).getType());
  assertEquals("Query executed successfully. Affected rows : 0\n",
          resultMessages.get(0).getData());
  assertEquals(InterpreterResult.Type.TEXT, resultMessages.get(1).getType());
  assertEquals("Query executed successfully. Affected rows : 1\n",
          resultMessages.get(1).getData());
  assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(2).getType());
  assertEquals("ID\n2\n", resultMessages.get(2).getData());
}
 
Example 14: statement precode under a non-default property prefix
@Test
public void testStatementPrecodeWithAnotherPrefix() throws IOException,
        InterpreterException {
  Properties properties = new Properties();
  properties.setProperty("anotherPrefix.driver", "org.h2.Driver");
  properties.setProperty("anotherPrefix.url", getJdbcConnection());
  properties.setProperty("anotherPrefix.user", "");
  properties.setProperty("anotherPrefix.password", "");
  properties.setProperty(String.format(STATEMENT_PRECODE_KEY_TEMPLATE, "anotherPrefix"),
          "set @v='statementAnotherPrefix'");
  JDBCInterpreter jdbcInterpreter = new JDBCInterpreter(properties);
  jdbcInterpreter.open();

  Map<String, String> localProperties = new HashMap<>();
  localProperties.put("db", "anotherPrefix");
  InterpreterContext context = InterpreterContext.builder()
      .setAuthenticationInfo(new AuthenticationInfo("testUser"))
      .setInterpreterOut(new InterpreterOutput(null))
      .setLocalProperties(localProperties)
      .build();

  String sqlQuery = "select @v";

  InterpreterResult interpreterResult = jdbcInterpreter.interpret(sqlQuery, context);

  assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code());
  List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage();
  assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(0).getType());
  assertEquals("@V\nstatementAnotherPrefix\n", resultMessages.get(0).getData());
}
 
Example 15: minimal InterpreterContext factory for tests
private InterpreterContext getInterpreterContext() {
  return InterpreterContext.builder()
          .setAuthenticationInfo(new AuthenticationInfo("testUser"))
          .setParagraphId("paragraphId")
          .setInterpreterOut(new InterpreterOutput(null))
          .build();
}
 
Example 16: InterpreterContext with a "kernel=python" local property
protected InterpreterContext getInterpreterContext() {
  Map<String, String> localProperties = new HashMap<>();
  localProperties.put("kernel", "python");
  return InterpreterContext.builder()
          .setNoteId("noteId")
          .setParagraphId("paragraphId")
          .setInterpreterOut(new InterpreterOutput(null))
          .setIntpEventClient(mock(RemoteInterpreterEventClient.class))
          .setLocalProperties(localProperties)
          .setResourcePool(resourcePool)
          .build();
}
 
Example 17: InterpreterContext with a "kernel=ir" local property
protected InterpreterContext getInterpreterContext() {
  Map<String, String> localProperties = new HashMap<>();
  localProperties.put("kernel", "ir");
  InterpreterContext context = InterpreterContext.builder()
          .setNoteId("note_1")
          .setParagraphId("paragraph_1")
          .setInterpreterOut(new InterpreterOutput(null))
          .setLocalProperties(localProperties)
          .build();
  return context;
}
 
Example 18: ApplicationContext constructor holding an InterpreterOutput
public ApplicationContext(String noteId,
                          String paragraphId,
                          String applicationInstanceId,
                          HeliumAppAngularObjectRegistry angularObjectRegistry,
                          InterpreterOutput out) {
  this.noteId = noteId;
  this.paragraphId = paragraphId;
  this.applicationInstanceId = applicationInstanceId;
  this.angularObjectRegistry = angularObjectRegistry;
  this.out = out;
}
 
Example 19: building an ApplicationContext with a per-application output
private ApplicationContext getApplicationContext(
    HeliumPackage packageInfo, String noteId, String paragraphId, String applicationInstanceId) {
  InterpreterOutput out = createAppOutput(noteId, paragraphId, applicationInstanceId);
  return new ApplicationContext(
      noteId,
      paragraphId,
      applicationInstanceId,
      new HeliumAppAngularObjectRegistry(angularObjectRegistry, noteId, applicationInstanceId),
      out);
}
 
Example 20: creating an ApplicationContext with a fresh InterpreterOutput
public ApplicationContext createContext(String noteId, String paragraphId, String appInstanceId) {
  return new ApplicationContext(
      noteId,
      paragraphId,
      appInstanceId,
      null,                          // no angular object registry here
      new InterpreterOutput(null));  // listener-less, buffering output
}
 
Example 21: rendering the conda usage template as HTML output
private void runCondaHelp(InterpreterOutput out) {
  try {
    out.setType(InterpreterResult.Type.HTML);
    out.writeResource("output_templates/conda_usage.html");
  } catch (IOException e) {
    logger.error("Can't print usage", e);
  }
}
 
Example 22: rendering the docker usage template as HTML output
private void printUsage(InterpreterOutput out) {
  try {
    out.setType(InterpreterResult.Type.HTML);
    out.writeResource("output_templates/docker_usage.html");
  } catch (IOException e) {
    logger.error("Can't print usage", e);
  }
}
 
Example 23: pulling a Docker image while streaming output
public boolean pull(InterpreterOutput out, String image) throws InterpreterException {
  int exit = 0;
  try {
    exit = runCommand(out, "docker", "pull", image);
  } catch (IOException | InterruptedException e) {
    logger.error(e.getMessage(), e);
    throw new InterpreterException(e);
  }
  return exit == 0;
}
 
Example 24: piping a subprocess's stdout into an InterpreterOutput
protected int runCommand(InterpreterOutput out, String... command)
    throws IOException, InterruptedException {
  ProcessBuilder builder = new ProcessBuilder(command);
  builder.redirectErrorStream(true); // merge stderr into stdout so one reader suffices
  Process process = builder.start();
  InputStream stdout = process.getInputStream();
  BufferedReader br = new BufferedReader(new InputStreamReader(stdout));
  String line;
  while ((line = br.readLine()) != null) {
    out.write(line + "\n"); // stream each line into the InterpreterOutput
  }
  int r = process.waitFor(); // Let the process finish.
  return r;
}
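
A hypothetical call site for Example 24's runCommand, collecting the streamed subprocess output afterwards; the docker arguments and logger are placeholders in the spirit of Examples 21 to 23, and exception handling is elided:

  InterpreterOutput out = new InterpreterOutput(null);
  int exit = runCommand(out, "docker", "pull", "ubuntu:latest");
  out.flush();
  if (exit != 0) {
    logger.error("Command failed with exit code {}", exit);
  }
  List<InterpreterResultMessage> lines = out.toInterpreterResultMessage();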
 
Example 25: InterpreterContext factory with a mocked event client
protected InterpreterContext getInterpreterContext() {
  return InterpreterContext.builder()
      .setNoteId("noteId")
      .setParagraphId("paragraphId")
      .setInterpreterOut(new InterpreterOutput(null))
      .setIntpEventClient(mock(RemoteInterpreterEventClient.class))
      .build();
}
 
Example 26: Python and Pandas SQL interpreter test setup
@Before
public void setUp() throws Exception {
  Properties p = new Properties();
  p.setProperty("zeppelin.python", "python");
  p.setProperty("zeppelin.python.maxResult", "100");
  p.setProperty("zeppelin.python.useIPython", "false");

  intpGroup = new InterpreterGroup();

  out = new InterpreterOutput(this);
  context = InterpreterContext.builder()
      .setInterpreterOut(out)
      .build();
  InterpreterContext.set(context);

  python = new PythonInterpreter(p);
  python.setInterpreterGroup(intpGroup);
  python.open();

  sql = new PythonInterpreterPandasSql(p);
  sql.setInterpreterGroup(intpGroup);

  intpGroup.put("note", Arrays.asList(python, sql));


  // Make sure Python is actually running before opening the SQL interpreter.
  InterpreterResult ret = python.interpret("print(\"python initialized\")\n", context);
  assertEquals(ret.message().toString(), InterpreterResult.Code.SUCCESS, ret.code());

  sql.open();
}
 
Example 27: verifying docker interactions with Mockito
@Test
public void testActivateEnv() throws InterpreterException {
  InterpreterContext context = getInterpreterContext();
  docker.interpret("activate env", context);
  verify(python, times(1)).open();
  verify(python, times(1)).close();
  verify(docker, times(1)).pull(any(InterpreterOutput.class), anyString());
  verify(python).setPythonExec(Mockito.matches("docker run -i --rm -v.*"));
}
 
Example 28: PySpark test setup with lazily opened interpreters
@Override
public void setUp() throws InterpreterException {
  Properties properties = new Properties();
  properties.setProperty(SparkStringConstants.MASTER_PROP_NAME, "local");
  properties.setProperty(SparkStringConstants.APP_NAME_PROP_NAME, "Zeppelin Test");
  properties.setProperty("zeppelin.spark.useHiveContext", "false");
  properties.setProperty("zeppelin.spark.maxResult", "3");
  properties.setProperty("zeppelin.spark.importImplicit", "true");
  properties.setProperty("zeppelin.pyspark.python", "python");
  properties.setProperty("zeppelin.dep.localrepo", Files.createTempDir().getAbsolutePath());
  properties.setProperty("zeppelin.pyspark.useIPython", "false");
  properties.setProperty("zeppelin.python.gatewayserver_address", "127.0.0.1");
  properties.setProperty("zeppelin.spark.deprecatedMsg.show", "false");

  // create interpreter group
  intpGroup = new InterpreterGroup();
  intpGroup.put("note", new LinkedList<Interpreter>());

  InterpreterContext context = InterpreterContext.builder()
      .setInterpreterOut(new InterpreterOutput(null))
      .setIntpEventClient(mockRemoteEventClient)
      .build();
  InterpreterContext.set(context);
  LazyOpenInterpreter sparkInterpreter =
      new LazyOpenInterpreter(new SparkInterpreter(properties));

  intpGroup.get("note").add(sparkInterpreter);
  sparkInterpreter.setInterpreterGroup(intpGroup);

  LazyOpenInterpreter iPySparkInterpreter =
      new LazyOpenInterpreter(new IPySparkInterpreter(properties));
  intpGroup.get("note").add(iPySparkInterpreter);
  iPySparkInterpreter.setInterpreterGroup(intpGroup);

  interpreter = new LazyOpenInterpreter(new PySparkInterpreter(properties));
  intpGroup.get("note").add(interpreter);
  interpreter.setInterpreterGroup(intpGroup);

  interpreter.open();
}
 
Example 29: static InterpreterContext factory for Spark tests
private static InterpreterContext createInterpreterContext(RemoteInterpreterEventClient mockRemoteEventClient) {
  return InterpreterContext.builder()
      .setNoteId("noteId")
      .setParagraphId("paragraphId")
      .setIntpEventClient(mockRemoteEventClient)
      .setInterpreterOut(new InterpreterOutput(null))
      .build();
}
 
Example 30: Spark and SparkSql interpreter test class setup
@BeforeClass
public static void setUp() throws Exception {
  Properties p = new Properties();
  p.setProperty(SparkStringConstants.MASTER_PROP_NAME, "local[4]");
  p.setProperty(SparkStringConstants.APP_NAME_PROP_NAME, "test");
  p.setProperty("zeppelin.spark.maxResult", "10");
  p.setProperty("zeppelin.spark.concurrentSQL", "true");
  p.setProperty("zeppelin.spark.sql.stacktrace", "true");
  p.setProperty("zeppelin.spark.useHiveContext", "true");
  p.setProperty("zeppelin.spark.deprecatedMsg.show", "false");

  intpGroup = new InterpreterGroup();
  sparkInterpreter = new SparkInterpreter(p);
  sparkInterpreter.setInterpreterGroup(intpGroup);

  sqlInterpreter = new SparkSqlInterpreter(p);
  sqlInterpreter.setInterpreterGroup(intpGroup);
  intpGroup.put("session_1", new LinkedList<Interpreter>());
  intpGroup.get("session_1").add(sparkInterpreter);
  intpGroup.get("session_1").add(sqlInterpreter);

  context = InterpreterContext.builder()
      .setNoteId("noteId")
      .setParagraphId("paragraphId")
      .setParagraphTitle("title")
      .setAngularObjectRegistry(new AngularObjectRegistry(intpGroup.getId(), null))
      .setResourcePool(new LocalResourcePool("id"))
      .setInterpreterOut(new InterpreterOutput(null))
      .setIntpEventClient(mock(RemoteInterpreterEventClient.class))
      .build();
  InterpreterContext.set(context);

  sparkInterpreter.open();
  sqlInterpreter.open();
}