/*
 * Copyright 2018-2022 Medical Information Systems Research Group (https://medical.zcu.cz),
 * Department of Computer Science and Engineering, University of West Bohemia.
 * Address: Univerzitni 8, 306 14 Plzen, Czech Republic.
 *
 * Author Petr Vcelak (vcelak@kiv.zcu.cz).
 *
 * This file is part of the MRECore project.
 *
 * MRECore is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, version 3 of the License.
 *
 * MRECore is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with MRECore. If not, see <http://www.gnu.org/licenses/>.
 */
package cz.zcu.mre.dao;

import cz.zcu.mre.data.datatable.DataTableQueryCriteria;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.io.FileUtils;
import org.apache.jena.query.Dataset;
import org.apache.jena.query.DatasetFactory;
import org.apache.jena.query.Query;
import org.apache.jena.query.QueryExecution;
import org.apache.jena.query.QueryExecutionFactory;
import org.apache.jena.query.QueryParseException;
import org.apache.jena.query.QuerySolution;
import org.apache.jena.query.ResultSet;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.update.Update;
import org.apache.jena.update.UpdateExecutionFactory;
import org.apache.jena.update.UpdateFactory;
import org.apache.jena.update.UpdateProcessor;
import org.apache.jena.update.UpdateRequest;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.Pageable;

/**
 * SPARQL Repository Implementation.
 *
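 * <p>Minimal usage sketch (the endpoint URL and query are illustrative only;
 * in a Spring context the URL is normally injected via the
 * {@code sparql.service.url} property):</p>
 * <pre>{@code
 * SPARQLRepository repository = new SPARQLRepositoryImpl();
 * repository.setServiceURL("http://127.0.0.1:3030/mrecore");
 * List<QuerySolution> rows =
 *         repository.querySelect("SELECT ?s ?p ?o WHERE { ?s ?p ?o } LIMIT 10");
 * }</pre>
 *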
 * @author Petr Vcelak (vcelak@kiv.zcu.cz)
 */
public class SPARQLRepositoryImpl implements SPARQLRepository, InitializingBean {

    private static final Logger LOG = LoggerFactory.getLogger(SPARQLRepositoryImpl.class);
    public static final String DEFAULT_SERVICE = "http://127.0.0.1:3030/mrecore";

    // true  - log queries as-is, including line ends
    // false - strip line ends from queries before logging
    private static boolean enabledQueryWrap = true;

    public enum QUERY {
        SELECT,
        INSERT,
        UPDATE,
        DELETE,
        CONSTRUCT,
        DESCRIBE
    }
    private static final String PATH_QUERY = "/query";
    private static final String PATH_UPDATE = "/update";

    @Value("${sparql.service.url}")
    private String serviceURL = DEFAULT_SERVICE;

    @Value("${sparql.service.file}")
    private String serviceURLFile;

    // TODO add other URLs
    private static final String DEFAULT_DOWNLOAD_FILENAME = "download.nt";
    private static final String DEFAULT_RDF_NOTATION = "N-TRIPLE";

    /**
     * Default constructor.
     */
    public SPARQLRepositoryImpl() {
    }

    @Override
    public void afterPropertiesSet() throws Exception {

        if (serviceURLFile == null) {
            return;
        }

        // read the SPARQL endpoint URL from the file given by sparql.service.file
        try {
            File file = new File(System.getProperty("user.home").concat(File.separator).concat(serviceURLFile));
            LOG.debug("Read SPARQL Endpoint URL from file {}", file.getAbsolutePath());

            // readFileToString never returns null, so test for an empty file instead
            String url = FileUtils.readFileToString(file, Charset.forName("US-ASCII")).trim();
            if (!url.isEmpty()) {
                LOG.info("Use SPARQL Endpoint service {} from file {}", url, file.getAbsolutePath());
                this.serviceURL = url;
            } else {
                throw new IllegalArgumentException("Empty SPARQL Endpoint URL in file: " + serviceURLFile);
            }
        } catch (IOException ex) {
            LOG.error("Cannot read SPARQL Endpoint URL from file {}", serviceURLFile, ex);
        }
    }

    @Override
    public String getServiceURL() {
        return serviceURL;
    }

    @Override
    public void setServiceURL(String serviceURL) {
        this.serviceURL = serviceURL;
    }

    @Override
    public String getUpdateServicePath() {
        return serviceURL + PATH_UPDATE;
    }

    @Override
    public String getQueryServicePath() {
        return serviceURL + PATH_QUERY;
    }

    @Override
    public String getAllServicePath() {
        return serviceURL;
    }

    @Override
    public Model getGraphDefault() {
        Dataset dataset = DatasetFactory.create(getAllServicePath());
        Model model = dataset.getDefaultModel();
        return model;
    }

    @Override
    public Model getGraphNamed(String graphURI) {
        Dataset dataset = DatasetFactory.create(getAllServicePath());
        Model model = dataset.getNamedModel(graphURI);
        return model;
    }

    /**
     * Copy the whole dataset from one SPARQL service into another.
     *
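     * <p>Illustrative call (the endpoint URLs are examples only); a
     * {@code null} graph URI copies the default graph:</p>
     * <pre>{@code
     * repository.datasetClone("http://127.0.0.1:3030/source",
     *                         "http://127.0.0.1:3030/target", null);
     * }</pre>
     *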
     * @param sparqlServiceFrom Source SPARQL Endpoint.
     * @param sparqlServiceTo Destination SPARQL Endpoint.
     * @param graphURI Graph URI is optional.
     */
    @Override
    public void datasetClone(String sparqlServiceFrom, String sparqlServiceTo, String graphURI) {

        Dataset datasetFrom = DatasetFactory.create(sparqlServiceFrom);
        Dataset datasetTo = DatasetFactory.create(sparqlServiceTo);

        if (graphURI != null) {
            Model model = datasetFrom.getNamedModel(graphURI);
            datasetTo.getNamedModel(graphURI).add(model);
        } else {
            Model model = datasetFrom.getDefaultModel();
            datasetTo.getDefaultModel().add(model);
        }
    }

    @Override
    public void datasetDownload(String sparqlServiceFrom, String graphURI) {
        datasetDownload(sparqlServiceFrom, graphURI, null, null);
    }

    /**
     * Download the whole dataset from a SPARQL service.
     *
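     * <p>Illustrative call (the endpoint URL and file name are examples
     * only); this dumps the default graph as N-Triples:</p>
     * <pre>{@code
     * repository.datasetDownload("http://127.0.0.1:3030/mrecore", null,
     *                            "dump.nt", "N-TRIPLE");
     * }</pre>
     *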
     * @param sparqlServiceFrom SPARQL Endpoint.
     * @param graphURI Graph URI.
     * @param outputFile Output file name.
     * @param rdfNotation Output RDF notation.
     */
    @Override
    public void datasetDownload(String sparqlServiceFrom, String graphURI, String outputFile, String rdfNotation) {

        if (outputFile == null || outputFile.isEmpty()) {
            outputFile = DEFAULT_DOWNLOAD_FILENAME;
        }

        if (rdfNotation == null || rdfNotation.isEmpty()) {
            rdfNotation = DEFAULT_RDF_NOTATION;
        }

        Dataset datasetFrom = DatasetFactory.create(sparqlServiceFrom);
        Model model = (graphURI != null) ? datasetFrom.getNamedModel(graphURI) : datasetFrom.getDefaultModel();

        // write the model and close the output stream afterwards
        try (OutputStream out = Files.newOutputStream(Paths.get(outputFile))) {
            model.write(out, rdfNotation);
        } catch (IOException ex) {
            LOG.error("Cannot write dataset to file {}", outputFile, ex);
        }
    }

    @Override
    public void datasetAppend(String sparqlServiceTo, String graphURI, Model data) {

        Dataset dataset = DatasetFactory.create(sparqlServiceTo);
        if (graphURI != null) {
            dataset.getNamedModel(graphURI).add(data);
        } else {
            dataset.getDefaultModel().add(data);
        }
    }

    @Override
    public void datasetAppend(Model data, String graphURI) {

        Dataset dataset = DatasetFactory.create(getAllServicePath());
        if (graphURI != null) {
            // named graph
            dataset.getNamedModel(getAllServicePath() + "/" + graphURI).add(data);
        } else {
            // default graph
            dataset.getDefaultModel().add(data);
        }
    }

    @Override
    public boolean queryAsk(String ask) {
        LOG.debug("ASK execute on {} query {}", getQueryServicePath(), queryWrap(ask));

        // run the ASK SPARQL query and close the QueryExecution afterwards
        boolean results;
        try (QueryExecution qe = QueryExecutionFactory.sparqlService(getQueryServicePath(), ask)) {
            results = qe.execAsk();
        }

        LOG.info("ASK result: '{}' on {} for query: {}", results, getQueryServicePath(), queryWrap(ask));
        return results;
    }

    public boolean queryAsk(Query query) {
        LOG.debug("ASK execute on {} query {}", getQueryServicePath(), query);

        // run the ASK SPARQL query and close the QueryExecution afterwards
        boolean results;
        try (QueryExecution qe = QueryExecutionFactory.sparqlService(getQueryServicePath(), query)) {
            results = qe.execAsk();
        }

        LOG.info("ASK result: '{}' on {} for query: {}", results, getQueryServicePath(), query);
        return results;
    }

    @Override
    public Model queryConstruct(String construct) {
        LOG.debug("CONSTRUCT execute on {} query {}", getQueryServicePath(), queryWrap(construct));

        // run the CONSTRUCT SPARQL query and close the QueryExecution afterwards
        Model results;
        try (QueryExecution qe = QueryExecutionFactory.sparqlService(getQueryServicePath(), construct)) {
            results = qe.execConstruct();
        }

        LOG.info("CONSTRUCT result size '{}' on {} for query: {}", results.size(), getQueryServicePath(), queryWrap(construct));
        return results;
    }

    @Override
    public Model queryConstruct(Query query) {
        LOG.debug("CONSTRUCT execute on {} query {}", getQueryServicePath(), query);

        // run the CONSTRUCT SPARQL query and close the QueryExecution afterwards
        Model results;
        try (QueryExecution qe = QueryExecutionFactory.sparqlService(getQueryServicePath(), query)) {
            results = qe.execConstruct();
        }

        LOG.info("CONSTRUCT result size '{}' on {} for query: {}", results.size(), getQueryServicePath(), query);
        return results;
    }

    @Override
    public long queryCount(String queryTotalSize) {
        LOG.debug("SELECT total size on {} query {}", getQueryServicePath(), queryWrap(queryTotalSize));

        long total = 0;
        List<QuerySolution> result = querySelect(queryTotalSize);

        // the query is expected to bind a single numeric ?total variable
        if (result.size() == 1
                && result.get(0) != null
                && result.get(0).get("total") != null
                && result.get(0).get("total").asLiteral() != null) {
            total = result.get(0).get("total").asLiteral().getLong();
        }

        LOG.info("SELECT result size '{}' on {} for query: {}", total, getQueryServicePath(), queryWrap(queryTotalSize));
        return total;
    }

    @Override
    public Model queryDescribe(String describe) {
        LOG.debug("DESCRIBE execute on {} query {}", getQueryServicePath(), queryWrap(describe));

        // run the DESCRIBE SPARQL query and close the QueryExecution afterwards
        Model results;
        try (QueryExecution qe = QueryExecutionFactory.sparqlService(getQueryServicePath(), describe)) {
            results = qe.execDescribe();
        }

        LOG.info("DESCRIBE result size '{}' on {} for query: {}", results.size(), getQueryServicePath(), queryWrap(describe));
        return results;
    }

    @Override
    public Model queryDescribe(Query describe) {
        LOG.debug("DESCRIBE execute on {} query {}", getQueryServicePath(), describe);

        // run the DESCRIBE SPARQL query and close the QueryExecution afterwards
        Model results;
        try (QueryExecution qe = QueryExecutionFactory.sparqlService(getQueryServicePath(), describe)) {
            results = qe.execDescribe();
        }

        LOG.info("DESCRIBE result size '{}' on {} for query: {}", results.size(), getQueryServicePath(), describe);
        return results;
    }

    @Override
    public List<QuerySolution> querySelect(String select) {
        LOG.debug("SELECT execute on {} query {}", getQueryServicePath(), queryWrap(select));

        List<QuerySolution> qs = new ArrayList<>();

        // run the SELECT SPARQL query
        try (QueryExecution qe = QueryExecutionFactory.sparqlService(getQueryServicePath(), select)) {
            ResultSet results = qe.execSelect();

            // the ResultSet has to be consumed before the QueryExecution is closed
            while (results.hasNext()) {
                qs.add(results.next());
            }
        }

        LOG.info("SELECT result size '{}' on {} for query: {}", qs.size(), getQueryServicePath(), queryWrap(select));
        return qs;
    }

    @Override
    public List<QuerySolution> querySelect(String select, Pageable pageable) {

        StringBuilder sb = new StringBuilder(select);

        // add limit and offset to the select query
        if (pageable.getPageSize() > 0) {
            sb.append(" LIMIT ");
            sb.append(pageable.getPageSize());
            sb.append(" OFFSET ");
            sb.append(pageable.getOffset());
        }

        return querySelect(sb.toString());
    }

    @Override
    public List<QuerySolution> querySelect(Query select, Pageable pageable) {
        LOG.debug("SELECT execute on {} query {}", getQueryServicePath(), select);

        // add limit and offset to the select query
        if (pageable.getPageSize() > 0) {
            select.setLimit(pageable.getPageSize());
            select.setOffset(pageable.getOffset());
        }

        return querySelect(select);
    }

    @Override
    public List<QuerySolution> querySelect(Query select, DataTableQueryCriteria queryCriteria) {
        LOG.debug("SELECT execute on {} query {}", getQueryServicePath(), select);

        // add limit and offset to the select query
        if (queryCriteria.getPageSize() > 0) {
            select.setLimit(queryCriteria.getPageSize());
            select.setOffset(queryCriteria.getPageNumber() * queryCriteria.getPageSize());
        }

        return querySelect(select);
    }

    @Override
    public List<QuerySolution> querySelect(Query select) {
        LOG.debug("SELECT execute on {} query {}", getQueryServicePath(), select);

        List<QuerySolution> qs = new ArrayList<>();

        // run the SELECT SPARQL query
        try (QueryExecution qe = QueryExecutionFactory.sparqlService(getQueryServicePath(), select)) {
            ResultSet results = qe.execSelect();

            // the ResultSet has to be consumed before the QueryExecution is closed
            while (results.hasNext()) {
                qs.add(results.next());
            }
        }

        LOG.info("SELECT result size '{}' on {} for query: {}", qs.size(), getQueryServicePath(), select);
        return qs;
    }

    @Override
    public boolean queryDelete(String delete) {

        return queryUpdate(delete);
    }

    @Override
    public boolean queryDelete(Update delete) {

        return queryUpdate(delete);
    }

    @Override
    public boolean queryInsert(String insert) {

        return queryUpdate(insert);
    }

    @Override
    public boolean queryInsert(Update insert) {

        return queryUpdate(insert);
    }

    /**
     * Execute a SPARQL Update query.
     *
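     * <p>Illustrative call (the triple is an example only):</p>
     * <pre>{@code
     * boolean ok = repository.queryUpdate(
     *         "INSERT DATA { <http://example.org/s> <http://example.org/p> \"value\" }");
     * }</pre>
     *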
     * @param update Query to be executed.
     * @return True on success, false otherwise.
     */
    @Override
    public boolean queryUpdate(String update) {
        LOG.debug("UPDATE execute on {} query {}", getUpdateServicePath(), queryWrap(update));
        try {
            UpdateProcessor upp = UpdateExecutionFactory.createRemote(UpdateFactory.create(update), getUpdateServicePath());
            upp.execute();

            LOG.info("UPDATE result '{}' on {} for query: {}", "success", getUpdateServicePath(), queryWrap(update));
            return true;

        } catch (QueryParseException ex) {
            LOG.error("UPDATE result '{}' on {} for query: {}", "fail", getUpdateServicePath(), queryWrap(update), ex);
            return false;
        }
    }

    /**
     * Execute a single SPARQL update, e.g. one built with Apache Jena's UpdateBuilder.
     *
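     * <p>Illustrative sketch using Jena's {@code UpdateBuilder}
     * ({@code org.apache.jena.arq.querybuilder.UpdateBuilder}); the URIs and
     * value are examples only:</p>
     * <pre>{@code
     * Update update = new UpdateBuilder()
     *         .addInsert(NodeFactory.createURI("http://example.org/s"),
     *                    NodeFactory.createURI("http://example.org/p"),
     *                    "value")
     *         .build();
     * boolean ok = repository.queryUpdate(update);
     * }</pre>
     *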
     * @param update Instance of update.
     * @return True on success, false otherwise.
     */
    @Override
    public boolean queryUpdate(Update update) {

        if (update == null) {
            throw new IllegalArgumentException("Update cannot be null");
        }

        UpdateRequest updateRequest = new UpdateRequest(update);
        //updateRequest.setBaseURI();
        //updateRequest.setPrefixMapping();
        //updateRequest.set...

        return queryUpdate(updateRequest);
    }

    @Override
    public boolean queryUpdate(UpdateRequest updateRequest) {

        if (updateRequest == null) {
            throw new IllegalArgumentException("Update cannot be null");
        }
        LOG.debug("UPDATE execute on {} query {}", getUpdateServicePath(), updateRequest);

        try {
            UpdateProcessor upp = UpdateExecutionFactory.createRemote(updateRequest, getUpdateServicePath());
            upp.execute();

            LOG.info("UPDATE result '{}' on {} for query: {}", "success", getUpdateServicePath(), updateRequest);
            return true;

        } catch (QueryParseException ex) {
            LOG.error("UPDATE result '{}' on {} for query: {}", "fail", getUpdateServicePath(), updateRequest, ex);
            return false;
        }
    }

    @Override
    public boolean removeSubjectAll(Resource uri) {
        LOG.info("Remove all data of the subject {}", uri.getURI());

        // TODO implement method
        throw new UnsupportedOperationException("Not supported yet.");
    }

    @Override
    public Model getModel(String resourceUri) {
        // TODO implement method
        throw new UnsupportedOperationException("Not supported yet.");
    }

    /**
     * Format a query for log output: keep line ends when query wrapping is
     * enabled, otherwise collapse the query to a single line.
     */
    private static String queryWrap(String query) {

        if (enabledQueryWrap) {
            return query;
        }

        return query.replace("\n", "");
    }

    public static boolean isEnabledQueryWrap() {
        return enabledQueryWrap;
    }

    public static void setEnabledQueryWrap(boolean enabledQueryWrap) {
        SPARQLRepositoryImpl.enabledQueryWrap = enabledQueryWrap;
    }

}