Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Annotation inject logs #249

Closed
wants to merge 18 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
18 commits
Select commit Hold shift + click to select a range
253c7d1
[#244] Use annotations to inject log in s-pipes-cli
palagdan Jul 24, 2024
0314e86
[#244] Use annotations to inject log in s-pipes-core
palagdan Jul 24, 2024
6eb9fdc
[#244] Use annotations to inject log in s-pipes-forms
palagdan Jul 24, 2024
41c47df
[#244] Use annotations to inject log in s-pipes-test
palagdan Jul 24, 2024
a004896
[#244] Use annotations to inject log in s-pipes-web
palagdan Jul 24, 2024
eabbd03
[#244] Remove annotations to inject log in s-pipes-forms because of t…
palagdan Jul 24, 2024
7fa413e
[#244] Use annotations to inject log in s-pipes-modules module-ckan2rdf
palagdan Jul 24, 2024
d0d0221
[#244] Use annotations to inject log in s-pipes-modules module-datase…
palagdan Jul 24, 2024
37ecda6
[#244] Use annotations to inject log in s-pipes-modules module-eccairs
palagdan Jul 24, 2024
f19454a
[#244] Use annotations to inject log in s-pipes-modules module-form
palagdan Jul 24, 2024
d0881dc
[#244] Use annotations to inject log in s-pipes-modules module-nlp
palagdan Jul 24, 2024
1acf9c8
[#244] Use annotations to inject log in s-pipes-modules module-rdf4j
palagdan Jul 24, 2024
1ec545f
[#244] Use annotations to inject log in s-pipes-modules module-sparql…
palagdan Jul 24, 2024
c1919b5
[#244] Use annotations to inject log in s-pipes-modules module-tarql
palagdan Jul 24, 2024
4ac7551
[#244] Use annotations to inject log in s-pipes-modules module-text-a…
palagdan Jul 24, 2024
f020a53
[#244] Lombok does not support the AJC (AspectJ) compiler
palagdan Jul 24, 2024
f233704
Update protect-master.yml
palagdan Jul 24, 2024
47e773b
[#244] Add version of lombok to the parent pom.xml and remove version…
palagdan Jul 29, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/protect-master.yml
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,6 @@ jobs:

# Test using proper JDK and MVN
- name: Test
uses: docker://maven:3-eclipse-temurin-21
uses: docker://maven:3-eclipse-temurin-17
with:
args: mvn test
7 changes: 7 additions & 0 deletions s-pipes-cli/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,11 @@
<artifactId>logback-core</artifactId>
<version>${ch.qos.logback.version}</version>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<scope>provided</scope>
</dependency>
<!-- IO utils -->
<dependency>
<groupId>commons-io</groupId>
Expand All @@ -72,6 +77,8 @@
</dependency>
</dependencies>



<build>
<plugins>
<plugin>
Expand Down
21 changes: 11 additions & 10 deletions s-pipes-cli/src/main/java/cz/cvut/spipes/cli/ExecuteModuleCLI.java
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
import cz.cvut.spipes.manager.OntologyDocumentManager;
import cz.cvut.spipes.manager.SPipesScriptManager;
import cz.cvut.spipes.modules.Module;
import lombok.extern.slf4j.Slf4j;
import org.apache.jena.query.QuerySolution;
import org.apache.jena.query.QuerySolutionMap;
import org.apache.jena.rdf.model.Model;
Expand All @@ -29,7 +30,7 @@
import java.util.Map;
import java.util.stream.Collectors;


@Slf4j
public class ExecuteModuleCLI {

// cat input-data.rdf | s-pipes execute --instance "<http://url>"
Expand All @@ -39,7 +40,7 @@ public class ExecuteModuleCLI {
// > output.data.rdf


private static final Logger LOG = LoggerFactory.getLogger(ExecuteModuleCLI.class);

private static final String DEFAULT_DELIMITER = ";";
//@Option(name = "-d", aliases = "--delimiter", metaVar = "DELIMITER", usage = "Input variables data delimiter ('" + DEFAULT_DELIMITER + "' by default)")
//private String delimiter = DEFAULT_DELIMITER;
Expand Down Expand Up @@ -98,32 +99,32 @@ public static void main(String[] args) throws IOException {
CmdLineUtils.parseCommandLine(args, argParser);

String output = String.join(" ", args);
LOG.info("Executing external module/function ... " + output);
log.info("Executing external module/function ... " + output);

// ----- load input model
Model inputDataModel = ModelFactory.createDefaultModel();

if (asArgs.inputDataFiles != null) {
for (File idFile : asArgs.inputDataFiles) {
LOG.debug("Loading input data from file {} ...", idFile);
log.debug("Loading input data from file {} ...", idFile);
inputDataModel.read(new FileInputStream(idFile), null, FileUtils.langTurtle);
}
}
if (asArgs.isInputDataFromStdIn) {
LOG.info("Loading input data from std-in ...");
log.info("Loading input data from std-in ...");
inputDataModel.read(System.in, null, FileUtils.langTurtle);
}

// ----- load modules and functions
LOG.debug("Loading scripts ...");
log.debug("Loading scripts ...");
SPipesScriptManager scriptManager = scriptManager = createSPipesScriptManager();
OntoDocManager.registerAllSPINModules();

// ----- load input bindings
VariablesBinding inputVariablesBinding = new VariablesBinding();
if (asArgs.inputBindingParametersMap != null) {
inputVariablesBinding = new VariablesBinding(transform(asArgs.inputBindingParametersMap));
LOG.info("Loaded input variable binding ={}", inputVariablesBinding);
log.info("Loaded input variable binding ={}", inputVariablesBinding);
}

// ----- create execution context
Expand All @@ -139,7 +140,7 @@ public static void main(String[] args) throws IOException {
}
ExecutionContext outputExecutionContext = engine.executePipeline(module, inputExecutionContext);

LOG.info("Processing successfully finished.");
log.info("Processing successfully finished.");
// outputExecutionContext.getDefaultModel().write(System.out);

// Model inputBindingModel = null;
Expand Down Expand Up @@ -265,7 +266,7 @@ private static SPipesScriptManager createSPipesScriptManager() {

// load from environment variables
String spipesOntologiesPathsStr = System.getenv(AppConstants.SYSVAR_SPIPES_ONTOLOGIES_PATH);
LOG.debug("Loading scripts from system variable {} = {}", AppConstants.SYSVAR_SPIPES_ONTOLOGIES_PATH, spipesOntologiesPathsStr);
log.debug("Loading scripts from system variable {} = {}", AppConstants.SYSVAR_SPIPES_ONTOLOGIES_PATH, spipesOntologiesPathsStr);
if (spipesOntologiesPathsStr != null) {
scriptPaths.addAll(
Arrays.stream(spipesOntologiesPathsStr.split(";"))
Expand Down Expand Up @@ -303,7 +304,7 @@ public static List<String> registerGlobalScripts(OntologyDocumentManager ontDocM
ontoUri -> {
String loc = locMapper.getAltEntry(ontoUri);
if (loc.endsWith(".sms.ttl")) {
LOG.info("Registering script from file " + loc + ".");
log.info("Registering script from file " + loc + ".");
_globalScripts.add(ontoUri);
}
}
Expand Down
6 changes: 6 additions & 0 deletions s-pipes-core/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,12 @@
<artifactId>logback-classic</artifactId>
</dependency>

<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<scope>provided</scope>
</dependency>

<!-- Test dependencies -->
<dependency>
<groupId>org.junit.jupiter</groupId>
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package cz.cvut.spipes.engine;

import cz.cvut.spipes.modules.Module;
import lombok.extern.slf4j.Slf4j;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.Resource;
Expand All @@ -14,16 +15,16 @@
import java.util.function.Function;
import java.util.stream.Collectors;

@Slf4j
class ExecutionEngineImpl implements ExecutionEngine {

private static Logger LOG = LoggerFactory.getLogger(ExecutionEngineImpl.class);

private Set<ProgressListener> listeners = new HashSet<>();

private static int i = 0 ;

public ExecutionContext executePipeline(final Module module, final ExecutionContext inputContext) {
LOG.info("Executing script {} with context {}.", module.getResource(), inputContext.toSimpleString());
log.info("Executing script {} with context {}.", module.getResource(), inputContext.toSimpleString());
final long pipelineExecutionId = Instant.now().toEpochMilli()*1000+(i++);

fire((l) -> {l.pipelineExecutionStarted(pipelineExecutionId); return null;});
Expand All @@ -37,7 +38,7 @@ private void fire(final Function<ProgressListener,Void> function) {
try {
function.apply(listener);
} catch(final Exception e) {
LOG.warn("Listener {} failed.", listener, e);
log.warn("Listener {} failed.", listener, e);
}
});
}
Expand All @@ -58,17 +59,17 @@ private ExecutionContext _executePipeline(long pipelineExecutionId, Module modul
fire((l) -> {l.moduleExecutionStarted(pipelineExecutionId, moduleExecutionId, module, context, predecessorId); return null;});

if (module.getExecutionContext() != null) {
LOG.debug("Execution context for module {} already set.", module);
log.debug("Execution context for module {} already set.", module);
} else {
module.setInputContext(context);

LOG.info(" ##### " + module.getLabel());
if (LOG.isTraceEnabled()) {
LOG.trace("Using input context {}", context.toTruncatedSimpleString()); //TODO redundant code -> merge
log.info(" ##### " + module.getLabel());
if (log.isTraceEnabled()) {
log.trace("Using input context {}", context.toTruncatedSimpleString()); //TODO redundant code -> merge
}
ExecutionContext outputContext = module.execute();
if (LOG.isTraceEnabled()) {
LOG.trace("Returning output context {}", outputContext.toSimpleString());
if (log.isTraceEnabled()) {
log.trace("Returning output context {}", outputContext.toSimpleString());
}
module.addOutputBindings(context.getVariablesBinding());
}
Expand All @@ -80,18 +81,18 @@ private ExecutionContext _executePipeline(long pipelineExecutionId, Module modul
.collect(Collectors.toMap(Module::getResource, mod -> this._executePipeline(pipelineExecutionId, mod, context, moduleExecutionId)));


LOG.info(" ##### " + module.getLabel());
log.info(" ##### " + module.getLabel());
ExecutionContext mergedContext = mergeContexts(resource2ContextMap);
if (LOG.isTraceEnabled()) {
LOG.trace("Using input merged context {}", mergedContext.toTruncatedSimpleString());
if (log.isTraceEnabled()) {
log.trace("Using input merged context {}", mergedContext.toTruncatedSimpleString());
}
fire((l) -> {l.moduleExecutionStarted(pipelineExecutionId, moduleExecutionId, module, mergedContext, predecessorId); return null;});

module.setInputContext(mergedContext);

ExecutionContext outputContext = module.execute();
if (LOG.isTraceEnabled()) {
LOG.trace("Returning output context {}", outputContext.toSimpleString());
if (log.isTraceEnabled()) {
log.trace("Returning output context {}", outputContext.toSimpleString());
}
module.addOutputBindings(mergedContext.getVariablesBinding());
fire((l) -> {l.moduleExecutionFinished(pipelineExecutionId, moduleExecutionId, module); return null;});
Expand Down Expand Up @@ -128,7 +129,7 @@ private ExecutionContext mergeContexts(Map<Resource, ExecutionContext> resource2
VariablesBinding conflictingBinding = variablesBinding.extendConsistently(b);

if (! conflictingBinding.isEmpty()) {
LOG.warn("Module {} has conflicting variables binding {} with sibling modules ocurring in pipeline. ", modRes, context);
log.warn("Module {} has conflicting variables binding {} with sibling modules ocurring in pipeline. ", modRes, context);
}
});

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,8 @@
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

import lombok.extern.slf4j.Slf4j;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.Resource;
Expand All @@ -28,10 +30,9 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@Slf4j
public class PipelineFactory {

private static final Logger LOG = LoggerFactory.getLogger(PipelineFactory.class);

// TODO inheritence not involved, not static context
static Map<Resource, Class<? extends Module>> moduleTypes = new HashMap<>();

Expand All @@ -54,12 +55,12 @@ public static void registerModuleType(Resource moduleType, Class<? extends Modul
}

private static void _registerModuleType(Resource moduleType, Class<? extends Module> moduleClass) {
LOG.info(" module: {} -> {}", moduleType, moduleClass);
log.info(" module: {} -> {}", moduleType, moduleClass);
moduleTypes.put(moduleType, moduleClass);
}

private static void _registerFunctionType(Resource functionType, Class<? extends ARQFunction> functionClass) {
LOG.info(" function: {} -> {}", functionType, functionClass);
log.info(" function: {} -> {}", functionType, functionClass);
FunctionRegistry.get().put(functionType.getURI(), functionClass);
}

Expand Down Expand Up @@ -114,7 +115,7 @@ public static Module loadModule(@NotNull Resource moduleRes) {
// TODO multiple module types per resource
Resource moduleTypeRes = moduleRes.getPropertyResourceValue(RDF.type);
if (moduleTypeRes == null) {
LOG.error("Cannot load module {} as its {} property value is missing.", moduleRes, RDF.type);
log.error("Cannot load module {} as its {} property value is missing.", moduleRes, RDF.type);
return null;
}
return loadModule(moduleRes, moduleTypeRes);
Expand Down Expand Up @@ -173,7 +174,7 @@ private static Map<Resource, Module> loadAllModules(@NotNull Model configModel)
.map(st -> {
Module m = res2ModuleMap.get(st.getObject().asResource());
if (m == null) {
LOG.error("Ignoring statement {}. The object of the triple must have rdf:type {}.", st, SM.Module);
log.error("Ignoring statement {}. The object of the triple must have rdf:type {}.", st, SM.Module);
}
return m;
}).filter(m -> (m != null)).forEach(
Expand All @@ -193,7 +194,7 @@ private static Module loadModule(@NotNull Resource moduleRes, @NotNull Resource
Class<? extends Module> moduleClass = moduleTypes.get(moduleTypeRes);

if (moduleClass == null) {
LOG.error("Ignoring module {}. Its type {} is not registered.", moduleRes, moduleTypeRes);
log.error("Ignoring module {}. Its type {} is not registered.", moduleRes, moduleTypeRes);
return null;
}

Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
package cz.cvut.spipes.engine;

import lombok.extern.slf4j.Slf4j;
import org.apache.jena.query.QuerySolution;
import org.apache.jena.query.QuerySolutionMap;
import org.apache.jena.rdf.model.*;
Expand All @@ -14,11 +15,10 @@
import java.util.*;
import java.util.stream.Collectors;

@Slf4j
public class VariablesBinding {

// TODO stream variables etc.

private static Logger LOG = LoggerFactory.getLogger(VariablesBinding.class);
private static final int MAX_TRUNCATED_VALUE_SIZE = 300;
QuerySolutionMap binding = new QuerySolutionMap();

Expand All @@ -31,7 +31,7 @@ public VariablesBinding(QuerySolution querySolution) {
key -> {
RDFNode value = querySolution.get(key);
if (value == null) {
LOG.error("Ignoring variable binding with null value for the variable name \"{}\".", key);
log.error("Ignoring variable binding with null value for the variable name \"{}\".", key);
} else {
binding.add(key, value);
}
Expand Down Expand Up @@ -79,7 +79,7 @@ public VariablesBinding extendConsistently(VariablesBinding newVarsBinding) {

if ((oldNode != null) && (!oldNode.equals(newNode))) {
conflictingBinding.add(v, newNode);
LOG.warn("Variable \"{}\" have been bind to value \"{}\", ignoring assignment to value \"{}\".", v, oldNode, newNode);
log.warn("Variable \"{}\" have been bind to value \"{}\", ignoring assignment to value \"{}\".", v, oldNode, newNode);
} else {
this.add(v, newNode);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
import cz.cvut.spipes.util.DateUtils;
import cz.cvut.spipes.util.Rdf4jUtils;
import cz.cvut.spipes.util.TempFileUtils;
import lombok.extern.slf4j.Slf4j;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.Property;
import org.apache.jena.rdf.model.Resource;
Expand Down Expand Up @@ -45,9 +46,8 @@
import java.nio.file.Paths;
import java.util.*;

@Slf4j
public class AdvancedLoggingProgressListener implements ProgressListener {
private static final Logger LOG =
LoggerFactory.getLogger(AdvancedLoggingProgressListener.class);
/**
* Maps pipeline executions and module executions to the transformation object.
*/
Expand Down Expand Up @@ -173,7 +173,7 @@ private void persistPipelineExecutionFinished2(final EntityManager em, final lon

private void persistPipelineExecutionFinished(final EntityManager em, final long pipelineExecutionId) {
if (em.isOpen()) {
LOG.debug("Saving metadata about finished pipeline execution {}.", pipelineExecutionId);
log.debug("Saving metadata about finished pipeline execution {}.", pipelineExecutionId);
Date finishDate = new Date();
em.getTransaction().begin();

Expand Down Expand Up @@ -359,7 +359,7 @@ private void writeRawData(EntityManager em, URI contextUri, Model model) {
connection.getValueFactory().createIRI(contextUri.toString()));
connection.commit();
} catch (final RepositoryException | RDFParseException | RepositoryConfigException | IOException e) {
LOG.error(e.getMessage(), e);
log.error(e.getMessage(), e);
} finally {
if (connection != null && connection.isOpen()) {
connection.close();
Expand All @@ -386,7 +386,7 @@ private void saveModelToFile(String filePath, Model model) {
try (OutputStream fileIs = new FileOutputStream(file)) {
model.write(fileIs, FileUtils.langTurtle);
} catch (IOException e) {
LOG.error("Error during dataset snapshot saving.", e);
log.error("Error during dataset snapshot saving.", e);
}
}

Expand Down
Loading
Loading