Java source code examples: org.openrdf.query.Dataset
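A Dataset tells a SPARQL query or update which default graphs and named graphs it should see. Before the examples, here is a minimal sketch (not taken from the examples below; the connection parameter and the graph IRI are assumptions) of the typical pattern: build a DatasetImpl and attach it to a prepared query.

import org.openrdf.model.URI;
import org.openrdf.model.impl.ValueFactoryImpl;
import org.openrdf.query.QueryLanguage;
import org.openrdf.query.TupleQuery;
import org.openrdf.query.TupleQueryResult;
import org.openrdf.query.impl.DatasetImpl;
import org.openrdf.repository.RepositoryConnection;

// Minimal sketch: "con" is assumed to be an already-open RepositoryConnection,
// and the graph IRI is a hypothetical example value.
void queryAgainstDataset(RepositoryConnection con) throws Exception {
    URI graph = ValueFactoryImpl.getInstance().createURI("http://example.org/graph1");
    DatasetImpl dataset = new DatasetImpl();
    dataset.addDefaultGraph(graph); // contributes to the query's default graph
    dataset.addNamedGraph(graph);   // visible to GRAPH ?g { ... } patterns
    TupleQuery query = con.prepareTupleQuery(QueryLanguage.SPARQL,
            "SELECT * WHERE { ?s ?p ?o }");
    query.setDataset(dataset);      // restrict evaluation to the graphs above
    TupleQueryResult result = query.evaluate();
    try {
        while (result.hasNext()) {
            System.out.println(result.next());
        }
    } finally {
        result.close();
    }
}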
Example 1
protected void uploadDataset(Dataset dataset)
    throws Exception
{
    RepositoryConnection con = dataRep.getConnection();
    try {
        // Merge default and named graphs to filter duplicates
        Set<URI> graphURIs = new HashSet<URI>();
        graphURIs.addAll(dataset.getDefaultGraphs());
        graphURIs.addAll(dataset.getNamedGraphs());
        for (Resource graphURI : graphURIs) {
            upload(((URI) graphURI), graphURI);
        }
    }
    finally {
        con.close();
    }
}
Example 2
protected String readInputData(Dataset dataset) throws Exception {
    final StringBuilder sb = new StringBuilder();
    if (dataset != null) {
        Set<URI> graphURIs = new HashSet<URI>();
        graphURIs.addAll(dataset.getDefaultGraphs());
        graphURIs.addAll(dataset.getNamedGraphs());
        for (Resource graphURI : graphURIs) {
            URL graphURL = new URL(graphURI.toString());
            InputStream in = graphURL.openStream();
            try {
                sb.append(IOUtil.readString(in));
            } finally {
                in.close(); // avoid leaking the stream if reading fails
            }
        }
    }
    return sb.toString();
}
Example 3
@Override
protected void uploadDataset(Dataset dataset)
    throws Exception
{
    // RepositoryConnection con = dataRep.getConnection();
    // try {
    // Merge default and named graphs to filter duplicates
    Set<URI> graphURIs = new HashSet<URI>();
    graphURIs.addAll(dataset.getDefaultGraphs());
    graphURIs.addAll(dataset.getNamedGraphs());
    for (Resource graphURI : graphURIs) {
        upload(((URI) graphURI), graphURI);
    }
    // }
    // finally {
    // con.close();
    // }
}
Example 4
public SPARQLQueryTest(String testURI, String name, String queryFileURL, String resultFileURL,
        Dataset dataSet, boolean laxCardinality, boolean checkOrder)
{
    super(name.replaceAll("\\(", " ").replaceAll("\\)", " "));
    this.testURI = testURI;
    this.queryFileURL = queryFileURL;
    this.resultFileURL = resultFileURL;
    this.dataset = dataSet;
    this.laxCardinality = laxCardinality;
    this.checkOrder = checkOrder;
}
Example 5
public BigdataSparqlTest(String testURI, String name, String queryFileURL,
        String resultFileURL, Dataset dataSet, boolean laxCardinality,
        boolean checkOrder) {
    super(testURI, name, queryFileURL, resultFileURL, dataSet,
            laxCardinality, checkOrder);
}
Example 6
public BigdataSparqlFullRWTxTest(String testURI, String name, String queryFileURL,
        String resultFileURL, Dataset dataSet, boolean laxCardinality,
        boolean checkOrder) {
    super(testURI, name, queryFileURL, resultFileURL, dataSet,
            laxCardinality, checkOrder);
}
Example 7
public BigdataEmbeddedFederationSparqlTest(String testURI, String name,
        String queryFileURL, String resultFileURL, Dataset dataSet,
        boolean laxCardinality, boolean checkOrder) {
    super(testURI, name, queryFileURL, resultFileURL, dataSet,
            laxCardinality, checkOrder);
}
Example 8
/**
 * Bigdata now uses an internal query model which differs significantly
 * from the Sesame query model. Support is no longer provided for
 * {@link TupleExpr} evaluation. SPARQL queries must be prepared and
 * evaluated using a {@link BigdataSailRepositoryConnection}.
 *
 * @throws SailException
 *             <em>always</em>.
 */
public CloseableIteration<? extends BindingSet, QueryEvaluationException> evaluate(
        final TupleExpr tupleExpr, //
        final Dataset dataset,//
        final BindingSet bindings,//
        final boolean includeInferred//
        ) throws SailException {
    throw new SailException(ERR_OPENRDF_QUERY_MODEL);
}
Example 9
/**
 * Optimize a SELECT query.
 *
 * @param astContainer
 *            The {@link ASTContainer}.
 * @param context
 *            The evaluation context.
 * @param globallyScopedBS
 *            The initial solution to kick things off.
 * @param dataset
 *            The dataset (optional).
 *
 * @return An optimized AST.
 *
 * @throws QueryEvaluationException
 */
static public QueryRoot optimizeQuery(
        final ASTContainer astContainer,
        final AST2BOpContext context,
        final QueryBindingSet globallyScopedBS,
        final Dataset dataset) throws QueryEvaluationException {
    final AbstractTripleStore store = context.getAbstractTripleStore();
    final DeferredResolutionResult resolved;
    try {
        // @see https://jira.blazegraph.com/browse/BLZG-1176
        resolved = ASTDeferredIVResolution.resolveQuery(
                store, astContainer, globallyScopedBS, dataset, context);
    } catch (MalformedQueryException e) {
        throw new QueryEvaluationException(e.getMessage(), e);
    }
    if (resolved.dataset != null) {
        astContainer.getOriginalAST().setDataset(
                new DatasetNode(resolved.dataset, false/* update */));
    }
    // Clear the optimized AST.
    astContainer.clearOptimizedAST();
    // Batch resolve Values to IVs and convert to bigdata binding set.
    final IBindingSet[] globallyScopedBSAsList = toBindingSet(resolved.bindingSet);
    // Convert the query (generates an optimized AST as a side-effect).
    AST2BOpUtility.convert(context, globallyScopedBSAsList);
    // The optimized AST.
    final QueryRoot optimizedQuery = astContainer.getOptimizedAST();
    return optimizedQuery;
}
Example 10
@Override
protected CloseableIteration<? extends BindingSet, QueryEvaluationException> evaluateInternal(TupleExpr tupleExpr, Dataset dataset,
        BindingSet bindings, boolean includeInferred) throws SailException {
    // Lock stLock = _sail.getStatementsReadLock();
    // Clone the tuple expression to allow for more aggressive optimizations
    tupleExpr = tupleExpr.clone();
    if (!(tupleExpr instanceof QueryRoot)) {
        // Add a dummy root node to the tuple expressions to allow the
        // optimizers to modify the actual root node
        tupleExpr = new QueryRoot(tupleExpr);
    }
    TripleSource tripleSource = new CumulusRDFTripleSource();
    EvaluationStrategy strategy = new RangeEvaluationStrategy(tripleSource, dataset);
    new BindingAssigner().optimize(tupleExpr, dataset, bindings);
    new ConstantOptimizer(strategy).optimize(tupleExpr, dataset, bindings);
    new CompareOptimizer().optimize(tupleExpr, dataset, bindings);
    new ConjunctiveConstraintSplitter().optimize(tupleExpr, dataset, bindings);
    new DisjunctiveConstraintOptimizer().optimize(tupleExpr, dataset, bindings);
    new SameTermFilterOptimizer().optimize(tupleExpr, dataset, bindings);
    new QueryModelNormalizer().optimize(tupleExpr, dataset, bindings);
    new CumulusQueryOptimizer(_crdf.isRangeIndexesSupportEnabled()).optimize(tupleExpr, dataset, bindings);
    new QueryJoinOptimizer(_select_est).optimize(tupleExpr, dataset, bindings);
    new FilterOptimizer().optimize(tupleExpr, dataset, bindings);
    new IterativeEvaluationOptimizer().optimize(tupleExpr, dataset, bindings);
    new OrderLimitOptimizer().optimize(tupleExpr, dataset, bindings);
    try {
        return strategy.evaluate(tupleExpr, EmptyBindingSet.getInstance());
    } catch (QueryEvaluationException e) {
        e.printStackTrace();
        throw new SailException(e);
    }
}
Example 11
@Override
public void optimize(final TupleExpr tupleExpr, final Dataset dataset, final BindingSet bindings) {
    // use native support for range queries
    if (_ranges_indexed) {
        tupleExpr.visit(new RangeQueryVisitor(tupleExpr));
        tupleExpr.visit(new OrderByVisitor(tupleExpr));
    }
    // use native cumulus model
    tupleExpr.visit(new CumulusNativeModelVisitor());
}
Example 12
public Dataset getDataset() {
    return query.getDataset();
}
Example 13
public void setDataset(Dataset arg0) {
    query.setDataset(arg0);
}
Example 14
public SPARQLQueryTest(String testURI, String name, String queryFileURL, String resultFileURL,
        Dataset dataSet, boolean laxCardinality)
{
    this(testURI, name, queryFileURL, resultFileURL, dataSet, laxCardinality, false);
}
Example 15
SPARQLQueryTest createSPARQLQueryTest(String testURI, String name, String queryFileURL,
        String resultFileURL, Dataset dataSet, boolean laxCardinality);
Example 16
SPARQLQueryTest createSPARQLQueryTest(String testURI, String name, String queryFileURL,
        String resultFileURL, Dataset dataSet, boolean laxCardinality, boolean checkOrder);
Example 17
/**
 * Unsupported operation.
 */
public BigdataParsedUpdate(TupleExpr tupleExpr, Dataset dataset) {
    throw new UnsupportedOperationException();
}
Example 18
/**
 * Unsupported operation.
 */
public BigdataParsedQuery(TupleExpr tupleExpr, Dataset dataset) {
    throw new UnsupportedOperationException();
}
Example 19
public void setDataset(Dataset dataset) {
    this.dataset = dataset;
}
Example 20
public Dataset getDataset() {
    return dataset;
}
Example 21
/**
 * Evaluate a boolean query.
 *
 * @param store
 *            The {@link AbstractTripleStore} having the data.
 * @param astContainer
 *            The {@link ASTContainer}.
 * @param globallyScopedBS
 *            The initial solution to kick things off.
 * @param dataset
 *            The dataset (optional).
 *
 * @return <code>true</code> if there are any solutions to the query.
 *
 * @throws QueryEvaluationException
 */
static public boolean evaluateBooleanQuery(
        final AbstractTripleStore store,
        final ASTContainer astContainer,
        final BindingSet globallyScopedBS,
        final Dataset dataset)
        throws QueryEvaluationException {
    final AST2BOpContext context = new AST2BOpContext(astContainer, store);
    final DeferredResolutionResult resolved;
    try {
        // @see https://jira.blazegraph.com/browse/BLZG-1176
        resolved = ASTDeferredIVResolution.resolveQuery(
                store, astContainer, globallyScopedBS, dataset, context);
    } catch (MalformedQueryException e) {
        throw new QueryEvaluationException(e.getMessage(), e);
    }
    if (resolved.dataset != null) {
        astContainer.getOriginalAST().setDataset(
                new DatasetNode(resolved.dataset, false/* update */));
    }
    // Clear the optimized AST.
    astContainer.clearOptimizedAST();
    // Batch resolve Values to IVs and convert to bigdata binding set.
    final IBindingSet[] globallyScopedBSAsList = toBindingSet(resolved.bindingSet);
    // Convert the query (generates an optimized AST as a side-effect).
    AST2BOpUtility.convert(context, globallyScopedBSAsList);
    // The optimized AST.
    final QueryRoot optimizedQuery = astContainer.getOptimizedAST();
    // Note: We do not need to materialize anything for ASK.
    final boolean materializeProjectionInQuery = context.materializeProjectionInQuery
            && !optimizedQuery.hasSlice();
    CloseableIteration<BindingSet, QueryEvaluationException> itr = null;
    try {
        itr = ASTEvalHelper.evaluateQuery(
                astContainer,
                context,
                materializeProjectionInQuery,
                new IVariable[0]// required
                );
        return itr.hasNext();
    } finally {
        if (itr != null) {
            /*
             * Ensure the query is terminated. An interrupt during hasNext()
             * should cause the query to terminate through itr.close().
             *
             * @see <a
             *      href="https://sourceforge.net/apps/trac/bigdata/ticket/707">
             *      BlockingBuffer.close() does not unblock threads </a>
             */
            itr.close();
        }
    }
}
Example 22
/**
 * Evaluate a SELECT query.
 *
 * @param store
 *            The {@link AbstractTripleStore} having the data.
 * @param astContainer
 *            The {@link ASTContainer}.
 * @param globallyScopedBS
 *            The initial solution to kick things off.
 * @param dataset
 *            The dataset (optional).
 *
 * @return An object from which the solutions may be drained.
 *
 * @throws QueryEvaluationException
 */
static public TupleQueryResult evaluateTupleQuery(
        final AbstractTripleStore store,
        final ASTContainer astContainer,
        final QueryBindingSet globallyScopedBS,
        final Dataset dataset) throws QueryEvaluationException {
    final AST2BOpContext context = new AST2BOpContext(astContainer, store);
    final QueryRoot optimizedQuery =
            optimizeQuery(astContainer, context, globallyScopedBS, dataset);
    // Get the projection for the query.
    final IVariable<?>[] projected = astContainer.getOptimizedAST()
            .getProjection().getProjectionVars();
    final List<String> projectedSet = new LinkedList<String>();
    for (IVariable<?> var : projected)
        projectedSet.add(var.getName());
    final boolean materializeProjectionInQuery = context.materializeProjectionInQuery
            && !optimizedQuery.hasSlice();
    final CloseableIteration<BindingSet, QueryEvaluationException> itr = ASTEvalHelper
            .evaluateQuery(astContainer, context,
                    materializeProjectionInQuery, projected);
    TupleQueryResult r = null;
    try {
        r = new TupleQueryResultImpl(projectedSet, itr);
        return r;
    } finally {
        if (r == null) {
            /*
             * Ensure the query is terminated if the assignment to r fails,
             * e.g., if interrupted during the ctor.
             *
             * @see <a
             *      href="https://sourceforge.net/apps/trac/bigdata/ticket/707">
             *      BlockingBuffer.close() does not unblock threads </a>
             */
            itr.close();
        }
    }
}
Example 23
/**
 * Evaluate a SPARQL UPDATE request (core method).
 *
 * @param conn
 *            The connection on which the update will be executed.
 * @param astContainer
 *            The query model.
 * @param dataset
 *            A dataset which will override the data set declaration for
 *            each {@link DeleteInsertGraph} operation in the update
 *            sequence (optional).
 * @param includeInferred
 *            if inferences should be included in various operations.
 * @param bs
 *            The initial binding set.
 *
 * @return The timestamp of the commit point.
 *
 * @throws UpdateExecutionException
 *
 *             TODO timeout for update?
 */
static public long executeUpdate(//
        final BigdataSailRepositoryConnection conn,//
        final ASTContainer astContainer,//
        final Dataset dataset,
        final boolean includeInferred,//
        final QueryBindingSet bs
        ) throws UpdateExecutionException {
    if (conn == null)
        throw new IllegalArgumentException();
    if (astContainer == null)
        throw new IllegalArgumentException();
    final DeferredResolutionResult resolved;
    try {
        // @see https://jira.blazegraph.com/browse/BLZG-1176
        resolved = ASTDeferredIVResolution.resolveUpdate(conn.getTripleStore(), astContainer, bs, dataset);
    } catch (MalformedQueryException e) {
        throw new UpdateExecutionException(e.getMessage(), e);
    }
    try {
        if (dataset != null) {
            /*
             * Apply the optional data set override.
             */
            applyDataSet(conn.getTripleStore(), astContainer, resolved.dataset);
        }
        final AST2BOpUpdateContext ctx = new AST2BOpUpdateContext(
                astContainer, conn);
        doSparqlLogging(ctx);
        // Propagate attribute.
        ctx.setIncludeInferred(includeInferred);
        // Batch resolve Values to IVs and convert to bigdata binding set.
        final IBindingSet[] bindingSets = toBindingSet(resolved.bindingSet);
        // Propagate bindings.
        ctx.setQueryBindingSet(bs);
        ctx.setBindings(bindingSets);
        ctx.setDataset(dataset);
        /*
         * Convert the query (generates an optimized AST as a side-effect).
         */
        AST2BOpUpdate.optimizeUpdateRoot(ctx);
        /*
         * Generate and execute physical plans for the update operations.
         */
        AST2BOpUpdate.convertUpdate(ctx);
        return ctx.getCommitTime();
    } catch (Exception ex) {
        ex.printStackTrace();
        throw new UpdateExecutionException(ex);
    }
}
Example 24
public DeferredResolutionResult(final BindingSet bindingSet, final Dataset dataset) {
    this.bindingSet = bindingSet;
    this.dataset = dataset;
}
Example 25
/**
 * Do deferred resolution of IVs, which were left unresolved while preparing the update.
 * @param store - triple store, which will be used for values resolution
 * @param ast - AST model of the update, which should be resolved
 * @param bs - binding set, which should be resolved
 * @param dataset - dataset, which should be resolved
 * @return the resolved binding set and dataset
 * @throws MalformedQueryException
 */
public static DeferredResolutionResult resolveUpdate(final AbstractTripleStore store, final ASTContainer ast, final BindingSet bs, final Dataset dataset) throws MalformedQueryException {
    final ASTDeferredIVResolution termsResolver = new ASTDeferredIVResolution(store);
    // process provided binding set
    BindingSet resolvedBindingSet = termsResolver.handleBindingSet(store, bs);
    // process provided dataset
    final Dataset resolvedDataset = termsResolver.handleDataset(store, dataset);
    /*
     * Prevent running IV resolution more than once.
     * Property RESOLVED is set after resolution completed,
     * so subsequent repetitive calls to update execute
     * (for example with different bindings) would not result
     * in running resolution again.
     */
    if (Boolean.TRUE.equals(ast.getProperty(Annotations.RESOLVED))) {
        /*
         * Resolve binding set or dataset if there are any values to be processed
         */
        if (!termsResolver.deferred.isEmpty()) {
            termsResolver.resolveIVs(store);
        }
        return new DeferredResolutionResult(resolvedBindingSet, resolvedDataset);
    }
    final long beginNanos = System.nanoTime();
    final UpdateRoot qc = (UpdateRoot) ast.getProperty(Annotations.ORIGINAL_AST);
    /*
     * Handle dataset declaration. It only appears for DELETE/INSERT
     * (aka ASTModify). It is attached to each DeleteInsertNode for
     * which it is given.
     */
    final Map<IDataSetNode, List<ASTDatasetClause>> dcLists = new LinkedHashMap<>();
    for (final Update update : qc.getChildren()) {
        if (update instanceof IDataSetNode) {
            final List<ASTDatasetClause> dcList = new ArrayList<>();
            dcList.addAll(update.getDatasetClauses());
            dcLists.put((IDataSetNode) update, dcList);
        }
    }
    termsResolver.resolve(store, qc, dcLists, bs);
    if (ast.getOriginalUpdateAST().getPrefixDecls() != null
            && !ast.getOriginalUpdateAST().getPrefixDecls().isEmpty()) {
        qc.setPrefixDecls(ast.getOriginalUpdateAST().getPrefixDecls());
    }
    ast.setOriginalUpdateAST(qc);
    ast.setResolveValuesTime(System.nanoTime() - beginNanos);
    ast.setProperty(Annotations.RESOLVED, Boolean.TRUE);
    return new DeferredResolutionResult(resolvedBindingSet, resolvedDataset);
}
Example 26
/**
 * Do deferred resolution of IVs, which were left unresolved after execution of each Update in an UpdateRoot.
 * @param store - triple store, which will be used for values resolution
 * @param update - the Update operation, which should be resolved
 * @param bs - binding set, which should be resolved
 * @param dataset - dataset, which should be resolved
 * @return the resolved binding set and dataset
 * @throws MalformedQueryException
 */
public static DeferredResolutionResult resolveUpdate(final AbstractTripleStore store, final Update update, final BindingSet bs, final Dataset dataset) throws MalformedQueryException {
    final ASTDeferredIVResolution termsResolver = new ASTDeferredIVResolution(store);
    // process provided binding set
    BindingSet resolvedBindingSet = termsResolver.handleBindingSet(store, bs);
    // process provided dataset
    final Dataset resolvedDataset = termsResolver.handleDataset(store, dataset);
    // final long beginNanos = System.nanoTime();
    termsResolver.resolve(store, update, null/* datasetClauseLists */, bs);
    // ast.setResolveValuesTime(System.nanoTime() - beginNanos);
    return new DeferredResolutionResult(resolvedBindingSet, resolvedDataset);
}
Example 27
public RangeEvaluationStrategy(TripleSource tripleSource, Dataset dataset) {
    super(tripleSource, dataset);
}
Example 28
public BigdataFederationSparqlTest(String URI, String name, String query,
        String results, Dataset dataSet, boolean laxCardinality,
        boolean checkOrder) {
    super(URI, name, query, results, dataSet, laxCardinality, checkOrder);
}
Example 29
/**
 * Evaluate a bigdata query model.
 *
 * @param queryRoot
 *            The query model.
 * @param dataset
 *            The data set (optional).
 * @param bindings
 *            The initial bindings.
 * @param includeInferred
 *            <code>true</code> iff inferences will be considered when
 *            reading on access paths.
 *
 * @return The {@link CloseableIteration} from which the solutions may
 *         be drained.
 *
 * @throws SailException
 *
 * @deprecated Consider removing this method from our public API. It is
 *             no longer in any code path for the bigdata code base.
 *             Embedded applications requiring high level evaluation
 *             should use {@link BigdataSailRepositoryConnection}. It
 *             does not call through here, but goes directly to the
 *             {@link ASTEvalHelper}.
 */
public CloseableIteration<? extends BindingSet, QueryEvaluationException> evaluate(
        final QueryRoot queryRoot, //
        final Dataset dataset,//
        final BindingSet bindings,//
        final boolean includeInferred//
        ) throws SailException {
    final ASTContainer astContainer = new ASTContainer(queryRoot);
    final QueryRoot originalQuery = astContainer.getOriginalAST();
    originalQuery.setIncludeInferred(includeInferred);
    try {
        flushStatementBuffers(true/* assertions */, true/* retractions */);
        return ASTEvalHelper.evaluateTupleQuery(getTripleStore(),
                astContainer, new QueryBindingSet(bindings), dataset);
    } catch (QueryEvaluationException e) {
        throw new SailException(e);
    }
}
Example 30
public DatasetNode(final Dataset dataset, final boolean update) {
    this(DataSetSummary.toInternalValues(dataset.getDefaultGraphs()),
            DataSetSummary.toInternalValues(dataset.getNamedGraphs()),
            update);
}