Java Source Code Examples: org.eclipse.rdf4j.query.UpdateExecutionException
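Every example on this page centers on Update.execute(), which is declared to throw UpdateExecutionException when a prepared SPARQL update cannot be carried out against the underlying store. As a quick orientation before the numbered examples, the following minimal, self-contained sketch shows the basic prepare/execute/catch pattern; the in-memory MemoryStore repository and the INSERT DATA update used here are illustrative assumptions, not taken from any of the examples below.

import org.eclipse.rdf4j.query.QueryLanguage;
import org.eclipse.rdf4j.query.Update;
import org.eclipse.rdf4j.query.UpdateExecutionException;
import org.eclipse.rdf4j.repository.Repository;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.repository.sail.SailRepository;
import org.eclipse.rdf4j.sail.memory.MemoryStore;

public class UpdateExecutionExceptionSketch {

    public static void main(String[] args) throws Exception {
        // Illustrative in-memory repository; any Repository implementation is used the same way.
        Repository repo = new SailRepository(new MemoryStore());
        repo.initialize();
        try (RepositoryConnection con = repo.getConnection()) {
            // prepareUpdate parses the update; execute() runs it and may throw UpdateExecutionException.
            Update update = con.prepareUpdate(QueryLanguage.SPARQL,
                    "INSERT DATA { <urn:s> <urn:p> <urn:o> }");
            try {
                update.execute();
            } catch (UpdateExecutionException e) {
                // Raised when the store cannot execute the update.
                System.err.println("update failed: " + e.getMessage());
            }
        } finally {
            repo.shutDown();
        }
    }
}

In the examples that follow, the exception is either propagated, wrapped in another exception type, or swallowed; swallowing it is typically tied to SILENT update operations, as the isSilent() checks in Examples 3, 12 and 13 show.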
Example 1
@Test
public void testCreateExistingGraph() throws Exception {
    logger.debug("executing testCreateExistingGraph");
    StringBuilder update = new StringBuilder();
    update.append(getNamespaceDeclarations());
    update.append("CREATE GRAPH <" + graph1 + "> ");
    Update operation = con.prepareUpdate(QueryLanguage.SPARQL, update.toString());
    try {
        operation.execute();
        fail("creation of existing graph should have resulted in error.");
    } catch (UpdateExecutionException e) {
        // expected behavior
        if (con.isActive()) {
            con.rollback();
        }
    }
}
Example 2
/**
 * @throws RepositoryException      if an issue occurs making the connection
 * @throws MalformedQueryException  if an issue occurs inserting data
 * @throws UpdateExecutionException if an issue occurs inserting data
 */
@Before
public void setUp() throws RepositoryException, MalformedQueryException, UpdateExecutionException {
    Repository repo = new SailRepository(new MemoryStore());
    repo.initialize();
    connection = repo.getConnection();
    servlet = new ExploreServlet();
    ValueFactory factory = connection.getValueFactory();
    foo = factory.createIRI("http://www.test.com/foo");
    bar = factory.createIRI("http://www.test.com/bar");
    bang = factory.createIRI("http://www.test.com/bang");
    foos = new IRI[128];
    for (int i = 0; i < foos.length; i++) {
        foos[i] = factory.createIRI("http://www.test.com/foo/" + i);
    }
    builder = mock(TupleResultBuilder.class);
}
Example 3
@Override
public void execute() throws UpdateExecutionException {
    ParsedUpdate parsedUpdate = getParsedUpdate();
    List<UpdateExpr> updateExprs = parsedUpdate.getUpdateExprs();
    Map<UpdateExpr, Dataset> datasetMapping = parsedUpdate.getDatasetMapping();
    for (UpdateExpr updateExpr : updateExprs) {
        Dataset activeDataset = getMergedDataset(datasetMapping.get(updateExpr));
        try {
            AbstractQueryPreparer.this.execute(updateExpr, activeDataset, getBindings(), getIncludeInferred(),
                    getMaxExecutionTime());
        } catch (UpdateExecutionException e) {
            if (!updateExpr.isSilent()) {
                throw e;
            }
        }
    }
}
Example 4
@Override
public void commit() throws RepositoryException {
    synchronized (transactionLock) {
        if (isActive()) {
            synchronized (transactionLock) {
                flushPendingAdds();
                flushPendingRemoves();
                // treat commit as a no-op if transaction string is empty
                if (sparqlTransaction.length() > 0) {
                    SPARQLUpdate transaction = new SPARQLUpdate(client, null, sparqlTransaction.toString());
                    try {
                        transaction.execute();
                    } catch (UpdateExecutionException e) {
                        throw new RepositoryException("error executing transaction", e);
                    }
                }
                sparqlTransaction = null;
            }
        } else {
            throw new RepositoryException("no transaction active.");
        }
    }
}
Example 5
@Override
public void execute(RepositoryConnection con) throws RepositoryException {
    try {
        Update preparedUpdate = con.prepareUpdate(QueryLanguage.SPARQL, getUpdateString(), getBaseURI());
        preparedUpdate.setIncludeInferred(isIncludeInferred());
        preparedUpdate.setDataset(getDataset());
        if (getBindings() != null) {
            for (Binding binding : getBindings()) {
                preparedUpdate.setBinding(binding.getName(), binding.getValue());
            }
        }
        preparedUpdate.execute();
    } catch (MalformedQueryException | UpdateExecutionException e) {
        throw new RepositoryException(e);
    }
}
Example 6
private void performUpdate(final String query, final SailRepositoryConnection conn, final ServletOutputStream os, final String infer, final String vis) throws RepositoryException, MalformedQueryException, IOException {
    final Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
    if (infer != null && infer.length() > 0) {
        update.setBinding(RdfCloudTripleStoreConfiguration.CONF_INFER, VALUE_FACTORY.createLiteral(Boolean.parseBoolean(infer)));
    }
    if (conn.getSailConnection() instanceof RdfCloudTripleStoreConnection && vis != null) {
        final RdfCloudTripleStoreConnection<?> sailConnection = (RdfCloudTripleStoreConnection<?>) conn.getSailConnection();
        sailConnection.getConf().set(RdfCloudTripleStoreConfiguration.CONF_CV, vis);
    }
    final long startTime = System.currentTimeMillis();
    try {
        update.execute();
    } catch (final UpdateExecutionException e) {
        final String message = "Update could not be successfully completed for query: ";
        os.print(String.format(message + "%s\n\n", StringEscapeUtils.escapeHtml4(query)));
        log.error(message + LogUtils.clean(query), e);
    }
    log.info(String.format("Update Time = %.3f\n", (System.currentTimeMillis() - startTime) / 1000.));
}
Example 7
@Override
public void update(String query) {
    try (RepositoryConnection conn = repo.getConnection()) {
        conn.prepareUpdate(QueryLanguage.SPARQL, adjustedQuery(query)).execute();
    } catch (MalformedQueryException | UpdateExecutionException | RepositoryException e) {
        throw new TripleStoreException(String.format("Query [%s]", query), e);
    }
}
Example 8
@Test
public void contextualInsertDeleteData()
        throws RepositoryException, MalformedQueryException, UpdateExecutionException {
    StringBuilder insert = new StringBuilder();
    insert.append(getNamespaceDeclarations());
    insert.append("INSERT DATA { ex:alice foaf:knows ex:bob. ex:alice foaf:mbox \"[email protected]\" .} ");
    SimpleDataset ds = new SimpleDataset();
    ds.setDefaultInsertGraph(graph2);
    ds.addDefaultRemoveGraph(graph2);
    Update updInsert = con.prepareUpdate(QueryLanguage.SPARQL, insert.toString());
    updInsert.setDataset(ds);
    updInsert.execute();
    assertTrue(con.hasStatement(alice, FOAF.KNOWS, bob, true, graph2));
    assertTrue(con.hasStatement(alice, FOAF.MBOX, f.createLiteral("[email protected]"), true, graph2));
    StringBuilder update = new StringBuilder();
    update.append(getNamespaceDeclarations());
    update.append("DELETE DATA { ex:alice foaf:knows ex:bob. ex:alice foaf:mbox \"[email protected]\" .} ");
    Update updDelete = con.prepareUpdate(QueryLanguage.SPARQL, update.toString());
    updDelete.setDataset(ds);
    updDelete.execute();
    String msg = "statement should have been deleted.";
    assertFalse(msg, con.hasStatement(alice, FOAF.KNOWS, bob, true, graph2));
    assertFalse(msg, con.hasStatement(alice, FOAF.MBOX, f.createLiteral("[email protected]"), true, graph2));
}
Example 9
/**
 * Execute a SPARQL or SERQL update
 *
 * @param queryLn     query language
 * @param queryString query string
 * @throws RepositoryException
 * @throws UpdateExecutionException
 * @throws MalformedQueryException
 */
protected void executeUpdate(QueryLanguage queryLn, String queryString)
        throws RepositoryException, UpdateExecutionException, MalformedQueryException {
    Repository repository = state.getRepository();
    consoleIO.writeln("Executing update...");
    long startTime = System.nanoTime();
    try (RepositoryConnection con = repository.getConnection()) {
        con.prepareUpdate(queryLn, queryString).execute();
    }
    long endTime = System.nanoTime();
    consoleIO.writeln("Update executed in " + (endTime - startTime) / 1_000_000 + " ms");
}
Example 10
private void executeUpdate(String updateString) throws Exception {
    try (RepositoryConnection con = repository.getConnection()) {
        try {
            con.prepareUpdate(QueryLanguage.SPARQL, updateString).execute();
        } catch (RepositoryException | MalformedQueryException | UpdateExecutionException e) {
            throw new BadRequestException(e.getMessage());
        }
    }
}
Example 11
private void updateQueryRepository(final String update)
        throws RepositoryException, UpdateExecutionException, MalformedQueryException {
    LOGGER.info("SPARQL/Update of Query Storage:\n--\n{}\n--", update);
    try (RepositoryConnection connection = this.queries.getConnection()) {
        connection.prepareUpdate(QueryLanguage.SPARQL, update).execute();
    }
}
Example 12
@Override
public void execute() throws UpdateExecutionException {
    ParsedUpdate parsedUpdate = getParsedUpdate();
    List<UpdateExpr> updateExprs = parsedUpdate.getUpdateExprs();
    Map<UpdateExpr, Dataset> datasetMapping = parsedUpdate.getDatasetMapping();
    SailUpdateExecutor executor = new SailUpdateExecutor(con, vf, parserConfig);
    for (UpdateExpr updateExpr : updateExprs) {
        Dataset activeDataset = getMergedDataset(datasetMapping.get(updateExpr));
        try {
            boolean localTransaction = isLocalTransaction();
            if (localTransaction) {
                beginLocalTransaction();
            }
            executor.executeUpdate(updateExpr, activeDataset, getBindings(), getIncludeInferred(),
                    getMaxExecutionTime());
            if (localTransaction) {
                commitLocalTransaction();
            }
        } catch (RDF4JException | IOException e) {
            logger.warn("exception during update execution: ", e);
            if (!updateExpr.isSilent()) {
                throw new UpdateExecutionException(e);
            }
        }
    }
}
Example 13
@Override
public void execute() throws UpdateExecutionException {
    ParsedUpdate parsedUpdate = getParsedUpdate();
    List<UpdateExpr> updateExprs = parsedUpdate.getUpdateExprs();
    Map<UpdateExpr, Dataset> datasetMapping = parsedUpdate.getDatasetMapping();
    SailUpdateExecutor executor = new SailUpdateExecutor(con.getSailConnection(), con.getValueFactory(),
            con.getParserConfig());
    boolean localTransaction = false;
    try {
        if (!getConnection().isActive()) {
            localTransaction = true;
            beginLocalTransaction();
        }
        for (UpdateExpr updateExpr : updateExprs) {
            Dataset activeDataset = getMergedDataset(datasetMapping.get(updateExpr));
            try {
                executor.executeUpdate(updateExpr, activeDataset, getBindings(), getIncludeInferred(),
                        getMaxExecutionTime());
            } catch (RDF4JException | IOException e) {
                logger.warn("exception during update execution: ", e);
                if (!updateExpr.isSilent()) {
                    throw new UpdateExecutionException(e);
                }
            }
        }
        if (localTransaction) {
            commitLocalTransaction();
            localTransaction = false;
        }
    } finally {
        if (localTransaction) {
            rollbackLocalTransaction();
        }
    }
}
Example 14
@Override
public void execute() throws UpdateExecutionException {
    try {
        long start = System.currentTimeMillis();
        sesameUpdate.execute();
        log.debug("Query Plan\n{}", sesameUpdate.toString());
        log.info("Query Execution Time: {}ms", System.currentTimeMillis() - start);
    } catch (org.eclipse.rdf4j.query.UpdateExecutionException e) {
        throw new UpdateExecutionException(e);
    }
}
Example 15
public static void testInfer(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException,
        UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
    // Add data
    String query = "INSERT DATA\n"//
            + "{ \n"//
            + " <http://acme.com/people/Mike> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <urn:type1>. "
            + " <urn:type1> <http://www.w3.org/2000/01/rdf-schema#subClassOf> <urn:superclass>. }";
    log.info("Performing Query");
    final Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
    update.execute();
    // refresh the graph for inferencing (otherwise there is a five minute wait)
    ((RdfCloudTripleStore<?>) sail).getInferenceEngine().refreshGraph();
    query = "select ?s { ?s <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <urn:superclass> . }";
    final CountingResultHandler resultHandler = new CountingResultHandler();
    final TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    Validate.isTrue(resultHandler.getCount() == 1);
    resultHandler.resetCount();
}
Example 16
public static void testAddNamespaces(final SailRepositoryConnection conn) throws MalformedQueryException, RepositoryException,
        UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException {
    conn.setNamespace("rya", "http://rya.com");
    final RepositoryResult<Namespace> results = conn.getNamespaces();
    for (final Namespace space : Iterations.asList(results)) {
        System.out.println(space.getName() + ", " + space.getPrefix());
    }
}
Example 17
public static void testLubmFile(final SailRepositoryConnection conn) throws MalformedQueryException, RepositoryException,
        UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, RDFParseException, IOException {
    final String query = LubmQuery.LUBM_QUERY_14.getSparqlQuery();
    log.info("Query to be Performed on LUBM Data :\n\n" + query + "\n");
    log.info("Adding LUBM Data from: " + LUBM_FILE.toAbsolutePath());
    addTriples(conn, LUBM_FILE.toFile(), RDFFormat.NTRIPLES);
    log.info("Executing LUBM Query");
    final CountingResultHandler resultHandler = new CountingResultHandler();
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    Validate.isTrue(resultHandler.getCount() > 0);
    resultHandler.resetCount();
    final String deleteQuery = "DELETE WHERE { ?s ?p ?o }";
    log.info("Deleting LUBM Data");
    final Update update = conn.prepareUpdate(QueryLanguage.SPARQL, deleteQuery);
    update.execute();
    final String selectAllQuery = "SELECT * WHERE { ?s ?p ?o }";
    log.info("Confirming LUBM Data Cleared");
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, selectAllQuery);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    Validate.isTrue(resultHandler.getCount() == 0);
}
Example 18
private static void testTemporalFreeGeoSearch(
        final SailRepositoryConnection conn)
        throws MalformedQueryException, RepositoryException,
        UpdateExecutionException, TupleQueryResultHandlerException,
        QueryEvaluationException {
    String queryString;
    TupleQuery tupleQuery;
    CountingResultHandler tupleHandler;
    // ring containing point
    queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#> "//
            + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/> "//
            + "PREFIX time: <http://www.w3.org/2006/time#> "//
            + "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> "//
            + "PREFIX fts: <http://rdf.useekm.com/fts#> "//
            + "SELECT ?feature ?point ?wkt ?event ?time ?person ?match" //
            + "{" //
            + " ?event a time:Instant . \n"//
            + " ?event time:inXSDDateTime ?time . \n"//
            + " FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after 3 seconds
            + " ?feature a geo:Feature . "//
            + " ?feature geo:hasGeometry ?point . "//
            + " ?point a geo:Point . "//
            + " ?point geo:asWKT ?wkt . "//
            + " FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)). " //
            + " ?person a <http://example.org/ontology/Person> . "//
            + " ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
            + " FILTER(fts:text(?match, \"pal*\")) " //
            + "}";//
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
    tupleHandler = new CountingResultHandler();
    tupleQuery.evaluate(tupleHandler);
    log.info("Result count : " + tupleHandler.getCount());
    Validate.isTrue(tupleHandler.getCount() == 0); // TODO ==5 some data is missing for this query!
}
Example 19
private void addToFooContext(String pattern)
        throws UpdateExecutionException, RepositoryException, MalformedQueryException {
    connection.prepareUpdate(QueryLanguage.SPARQL, PREFIX + pattern + SUFFIX).execute();
}
Example 20
protected void runTest(final CustomGraphQueryInferencer inferencer) throws RepositoryException, RDFParseException,
        IOException, MalformedQueryException, UpdateExecutionException {
    // Initialize
    Repository sail = new SailRepository(inferencer);
    sail.initialize();
    RepositoryConnection connection = sail.getConnection();
    try {
        connection.begin();
        connection.clear();
        connection.add(new StringReader(initial), BASE, RDFFormat.TURTLE);
        // Test initial inferencer state
        Collection<Value> watchPredicates = inferencer.getWatchPredicates();
        assertThat(watchPredicates).hasSize(testData.predCount);
        Collection<Value> watchObjects = inferencer.getWatchObjects();
        assertThat(watchObjects).hasSize(testData.objCount);
        Collection<Value> watchSubjects = inferencer.getWatchSubjects();
        assertThat(watchSubjects).hasSize(testData.subjCount);
        ValueFactory factory = connection.getValueFactory();
        if (resourceFolder.startsWith(PREDICATE)) {
            assertThat(watchPredicates.contains(factory.createIRI(BASE, "brotherOf"))).isTrue();
            assertThat(watchPredicates.contains(factory.createIRI(BASE, "parentOf"))).isTrue();
        } else {
            IRI bob = factory.createIRI(BASE, "Bob");
            IRI alice = factory.createIRI(BASE, "Alice");
            assertThat(watchSubjects).contains(bob, alice);
            assertThat(watchObjects).contains(bob, alice);
        }
        // Test initial inferencing results
        assertThat(Iterations.asSet(connection.getStatements(null, null, null, true)))
                .hasSize(testData.initialCount);
        // Test results after removing some statements
        connection.prepareUpdate(QueryLanguage.SPARQL, delete).execute();
        assertThat(Iterations.asSet(connection.getStatements(null, null, null, true)))
                .hasSize(testData.countAfterRemove);
        // Tidy up. Storage gets re-used for subsequent tests, so must clear here,
        // in order to properly clear out any inferred statements.
        connection.clear();
        connection.commit();
    } finally {
        connection.close();
    }
    sail.shutDown();
}
Example 21
@Test
public void testCustomQueryInference() throws RepositoryException, RDFParseException, MalformedQueryException,
        UpdateExecutionException, IOException, UnsupportedQueryLanguageException, SailException {
    runTest(createRepository(true));
}
Example 22
@Test
public void testCustomQueryInferenceImplicitMatcher()
        throws RepositoryException, RDFParseException, MalformedQueryException, UpdateExecutionException,
        IOException, UnsupportedQueryLanguageException, SailException {
    runTest(createRepository(false));
}
Example 23
protected abstract void execute(UpdateExpr updateExpr, Dataset dataset, BindingSet bindings,
        boolean includeInferred, int maxExecutionTime) throws UpdateExecutionException;
Example 24
@Override
public void execute() throws UpdateExecutionException {
}
Example 25
public static void testAddAndDelete(final SailRepositoryConnection conn)
        throws MalformedQueryException, RepositoryException,
        UpdateExecutionException, QueryEvaluationException,
        TupleQueryResultHandlerException, AccumuloException,
        AccumuloSecurityException, TableNotFoundException {
    // Add data
    String query = "INSERT DATA\n"//
            + "{ GRAPH <http://updated/test> {\n"//
            + " <http://acme.com/people/Mike> " //
            + " <http://acme.com/actions/likes> \"A new book\" ;\n"//
            + " <http://acme.com/actions/likes> \"Avocados\" .\n"
            + "} }";
    log.info("Performing Query");
    Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
    update.execute();
    query = "select ?p ?o { GRAPH <http://updated/test> {<http://acme.com/people/Mike> ?p ?o . }}";
    final CountingResultHandler resultHandler = new CountingResultHandler();
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    Validate.isTrue(resultHandler.getCount() == 2);
    resultHandler.resetCount();
    // Delete Data
    query = "DELETE DATA\n" //
            + "{ GRAPH <http://updated/test> {\n"
            + " <http://acme.com/people/Mike> <http://acme.com/actions/likes> \"A new book\" ;\n"
            + " <http://acme.com/actions/likes> \"Avocados\" .\n" + "}}";
    update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
    update.execute();
    query = "select ?p ?o { GRAPH <http://updated/test> {<http://acme.com/people/Mike> ?p ?o . }}";
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    Validate.isTrue(resultHandler.getCount() == 0);
}
Example 26
public static void testAddAndDelete(final SailRepositoryConnection conn) throws MalformedQueryException,
        RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException,
        AccumuloException, AccumuloSecurityException, TableNotFoundException {
    // Add data
    String query = "INSERT DATA\n"//
            + "{ GRAPH <http://updated/test> {\n"//
            + " <http://acme.com/people/Mike> " //
            + " <http://acme.com/actions/likes> \"A new book\" ;\n"//
            + " <http://acme.com/actions/likes> \"Avocados\" .\n" + "} }";
    log.info("Performing Query");
    Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
    update.execute();
    query = "select ?x {GRAPH <http://updated/test> {?x <http://acme.com/actions/likes> \"A new book\" . "//
            + " ?x <http://acme.com/actions/likes> \"Avocados\" }}";
    final CountingResultHandler resultHandler = new CountingResultHandler();
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    Validate.isTrue(resultHandler.getCount() == 1);
    resultHandler.resetCount();
    // Delete Data
    query = "DELETE DATA\n" //
            + "{ GRAPH <http://updated/test> {\n"
            + " <http://acme.com/people/Mike> <http://acme.com/actions/likes> \"A new book\" ;\n"
            + " <http://acme.com/actions/likes> \"Avocados\" .\n" + "}}";
    update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
    update.execute();
    query = "select ?x {GRAPH <http://updated/test> {?x <http://acme.com/actions/likes> \"A new book\" . "//
            + " ?x <http://acme.com/actions/likes> \"Avocados\" }}";
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    Validate.isTrue(resultHandler.getCount() == 0);
}
Example 27
public static void testPropertyChainInferenceAltRepresentation(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException,
        UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
    // Add data
    String query = "INSERT DATA\n"//
            + "{ GRAPH <http://updated/test> {\n"//
            + " <urn:jenGreatGranMother> <urn:Motherof> <urn:jenGranMother> . "
            + " <urn:jenGranMother> <urn:isChildOf> <urn:jenGreatGranMother> . "
            + " <urn:jenGranMother> <urn:Motherof> <urn:jenMother> . "
            + " <urn:jenMother> <urn:isChildOf> <urn:jenGranMother> . "
            + " <urn:jenMother> <urn:Motherof> <urn:jen> . "
            + " <urn:jen> <urn:isChildOf> <urn:jenMother> . "
            + " <urn:jen> <urn:Motherof> <urn:jenDaughter> . }}";
    log.info("Performing Query");
    Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
    update.execute();
    query = "select ?p { GRAPH <http://updated/test> {?s <urn:Motherof>/<urn:Motherof> ?p}}";
    CountingResultHandler resultHandler = new CountingResultHandler();
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    // try adding a property chain and querying for it
    query = "INSERT DATA\n"//
            + "{ GRAPH <http://updated/test> {\n"//
            + " <urn:greatMother> owl:propertyChainAxiom <urn:12342> . "
            + " <urn:12342> <http://www.w3.org/1999/02/22-rdf-syntax-ns#first> _:node1atjakcvbx15023 . "
            + " _:node1atjakcvbx15023 <http://www.w3.org/2002/07/owl#inverseOf> <urn:isChildOf> . "
            + " <urn:12342> <http://www.w3.org/1999/02/22-rdf-syntax-ns#rest> _:node1atjakcvbx15123 . "
            + " _:node1atjakcvbx15123 <http://www.w3.org/1999/02/22-rdf-syntax-ns#rest> <http://www.w3.org/1999/02/22-rdf-syntax-ns#nil> . "
            + " _:node1atjakcvbx15123 <http://www.w3.org/1999/02/22-rdf-syntax-ns#first> <urn:MotherOf> . }}";
    update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
    update.execute();
    ((RdfCloudTripleStore<?>) sail).getInferenceEngine().refreshGraph();
    resultHandler.resetCount();
    query = "select ?x { GRAPH <http://updated/test> {<urn:jenGreatGranMother> <urn:greatMother> ?x}}";
    resultHandler = new CountingResultHandler();
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
}
Example 28
public static void testPropertyChainInference(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException,
        UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
    // Add data
    String query = "INSERT DATA\n"//
            + "{ GRAPH <http://updated/test> {\n"//
            + " <urn:paulGreatGrandfather> <urn:father> <urn:paulGrandfather> . "
            + " <urn:paulGrandfather> <urn:father> <urn:paulFather> . "
            + " <urn:paulFather> <urn:father> <urn:paul> . "
            + " <urn:paul> <urn:father> <urn:paulSon> . }}";
    log.info("Performing Query");
    Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
    update.execute();
    query = "select ?p { GRAPH <http://updated/test> {<urn:paulGreatGrandfather> <urn:father>/<urn:father> ?p}}";
    CountingResultHandler resultHandler = new CountingResultHandler();
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    // try adding a property chain and querying for it
    query = "INSERT DATA\n"//
            + "{ GRAPH <http://updated/test> {\n"//
            + " <urn:greatGrandfather> owl:propertyChainAxiom <urn:1234> . "
            + " <urn:1234> <http://www.w3.org/2000/10/swap/list#length> 3 . "
            + " <urn:1234> <http://www.w3.org/2000/10/swap/list#index> (0 <urn:father>) . "
            + " <urn:1234> <http://www.w3.org/2000/10/swap/list#index> (1 <urn:father>) . "
            + " <urn:1234> <http://www.w3.org/2000/10/swap/list#index> (2 <urn:father>) . }}";
    update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
    update.execute();
    query = "INSERT DATA\n"//
            + "{ GRAPH <http://updated/test> {\n"//
            + " <urn:grandfather> owl:propertyChainAxiom <urn:12344> . "
            + " <urn:12344> <http://www.w3.org/2000/10/swap/list#length> 2 . "
            + " <urn:12344> <http://www.w3.org/2000/10/swap/list#index> (0 <urn:father>) . "
            + " <urn:12344> <http://www.w3.org/2000/10/swap/list#index> (1 <urn:father>) . }}";
    update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
    update.execute();
    ((RdfCloudTripleStore<?>) sail).getInferenceEngine().refreshGraph();
    resultHandler.resetCount();
    query = "select ?p { GRAPH <http://updated/test> {<urn:paulGreatGrandfather> <urn:greatGrandfather> ?p}}";
    resultHandler = new CountingResultHandler();
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    resultHandler.resetCount();
    query = "select ?s ?p { GRAPH <http://updated/test> {?s <urn:grandfather> ?p}}";
    resultHandler = new CountingResultHandler();
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
}