Merge pull request #700 from jonbullock/master
Manual merge of PRs for the 2.7.0 release
jonbullock authored May 22, 2021
2 parents dcc0e83 + fe280a2 commit a4ecc3e
Showing 96 changed files with 2,715 additions and 1,499 deletions.
4 changes: 2 additions & 2 deletions appveyor.yml
@@ -20,6 +20,6 @@ install:
- gradlew.bat --version
- file C:\projects\jbake\jbake-core\src\test\resources\fixture\jbake.properties
build_script:
- gradlew.bat -i assemble
- gradlew.bat -Dfile.encoding=UTF-8 -i assemble
test_script:
- gradlew.bat -i -S check
- gradlew.bat -Dfile.encoding=UTF-8 -i -S check
3 changes: 2 additions & 1 deletion gradle.properties
@@ -11,7 +11,8 @@ asciidoctorjVersion = 2.4.3
asciidoctorjDiagramVersion = 2.1.0
args4jVersion = 2.33
commonsIoVersion = 2.8.0
commonsConfigurationVersion = 1.10
commonsConfigurationVersion = 2.7
commonsBeanutilsVersion = 1.9.4
commonsLangVersion = 3.12.0
commonsVfs2Version = 2.7.0
freemarkerVersion = 2.3.31
3 changes: 2 additions & 1 deletion jbake-core/build.gradle
@@ -8,7 +8,8 @@ description = "The core library of JBake"

dependencies {
implementation "commons-io:commons-io:$commonsIoVersion"
implementation "commons-configuration:commons-configuration:$commonsConfigurationVersion"
implementation "org.apache.commons:commons-configuration2:$commonsConfigurationVersion"
implementation "commons-beanutils:commons-beanutils:$commonsBeanutilsVersion"
implementation "org.apache.commons:commons-vfs2:$commonsVfs2Version", optional
implementation "org.apache.commons:commons-lang3:$commonsLangVersion"
implementation("com.googlecode.json-simple:json-simple:$jsonSimpleVersion") {
4 changes: 2 additions & 2 deletions jbake-core/src/main/java/org/jbake/app/Asset.java
@@ -1,6 +1,6 @@
package org.jbake.app;

import org.apache.commons.configuration.CompositeConfiguration;
import org.apache.commons.configuration2.CompositeConfiguration;
import org.apache.commons.io.FileUtils;
import org.jbake.app.configuration.JBakeConfiguration;
import org.jbake.app.configuration.JBakeConfigurationFactory;
@@ -24,7 +24,7 @@ public class Asset {

private static final Logger LOGGER = LoggerFactory.getLogger(Asset.class);
private final List<Throwable> errors = new LinkedList<>();
private JBakeConfiguration config;
private final JBakeConfiguration config;

/**
* @param source Source file for the asset
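The dependency bump above (gradle.properties and jbake-core/build.gradle) moves JBake from Commons Configuration 1.10 to Commons Configuration 2.7, switching the coordinates to org.apache.commons:commons-configuration2 and adding commons-beanutils explicitly, presumably because Configuration 2 uses it at runtime. On the source side the package changes from org.apache.commons.configuration to org.apache.commons.configuration2, as the Asset.java import above shows. For orientation only, a minimal sketch of the 2.x loading style, assuming the standard commons-configuration2 builder API (this is not JBake code; the property file name and key are placeholders):

import java.io.File;

import org.apache.commons.configuration2.CompositeConfiguration;
import org.apache.commons.configuration2.PropertiesConfiguration;
import org.apache.commons.configuration2.builder.fluent.Configurations;
import org.apache.commons.configuration2.ex.ConfigurationException;

public class Configuration2Sketch {

    // Loads a properties file the Commons Configuration 2 way: file-based
    // configurations are obtained through a builder, here via the fluent
    // Configurations convenience facade.
    public static CompositeConfiguration load(File propertiesFile) throws ConfigurationException {
        Configurations configurations = new Configurations();
        PropertiesConfiguration fromFile = configurations.properties(propertiesFile);

        // CompositeConfiguration still exists in 2.x, now in the
        // org.apache.commons.configuration2 package; configurations added
        // earlier win on duplicate keys.
        CompositeConfiguration composite = new CompositeConfiguration();
        composite.addConfiguration(fromFile);
        return composite;
    }

    public static void main(String[] args) throws ConfigurationException {
        CompositeConfiguration config = load(new File("jbake.properties"));  // placeholder path
        System.out.println(config.getString("render.encoding", "UTF-8"));   // placeholder key
    }
}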
230 changes: 89 additions & 141 deletions jbake-core/src/main/java/org/jbake/app/ContentStore.java

Large diffs are not rendered by default.

223 changes: 86 additions & 137 deletions jbake-core/src/main/java/org/jbake/app/Crawler.java
@@ -1,14 +1,14 @@
package org.jbake.app;

import com.orientechnologies.orient.core.record.impl.ODocument;
import org.apache.commons.configuration.CompositeConfiguration;
import org.apache.commons.configuration2.CompositeConfiguration;
import org.apache.commons.io.FilenameUtils;
import org.jbake.app.Crawler.Attributes.Status;
import org.jbake.app.configuration.JBakeConfiguration;
import org.jbake.app.configuration.JBakeConfigurationFactory;
import org.jbake.model.DocumentAttributes;
import org.jbake.model.DocumentModel;
import org.jbake.model.DocumentStatus;
import org.jbake.model.DocumentTypes;
import org.jbake.model.ModelAttributes;
import org.jbake.util.HtmlUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -28,10 +28,10 @@
*/
public class Crawler {

private static final Logger LOGGER = LoggerFactory.getLogger(Crawler.class);
private static final Logger logger = LoggerFactory.getLogger(Crawler.class);
private final ContentStore db;
private JBakeConfiguration config;
private Parser parser;
private final JBakeConfiguration config;
private final Parser parser;

/**
* @param db Database instance for content
@@ -63,24 +63,23 @@ public Crawler(ContentStore db, JBakeConfiguration config) {
public void crawl() {
crawl(config.getContentFolder());

LOGGER.info("Content detected:");
logger.info("Content detected:");
for (String docType : DocumentTypes.getDocumentTypes()) {
long count = db.getDocumentCount(docType);
if (count > 0) {
LOGGER.info("Parsed {} files of type: {}", count, docType);
logger.info("Parsed {} files of type: {}", count, docType);
}
}

}

public void crawlDataFiles() {
crawlDataFiles(config.getDataFolder());

LOGGER.info("Data files detected:");
String docType = "data";
logger.info("Data files detected:");
String docType = config.getDataFileDocType();
long count = db.getDocumentCount(docType);
if (count > 0) {
LOGGER.info("Parsed {} files", count);
logger.info("Parsed {} files", count);
}
}

@@ -95,41 +94,37 @@ private void crawl(File path) {
Arrays.sort(contents);
for (File sourceFile : contents) {
if (sourceFile.isFile()) {
StringBuilder sb = new StringBuilder();
sb.append("Processing [").append(sourceFile.getPath()).append("]... ");
String sha1 = buildHash(sourceFile);
String uri = buildURI(sourceFile);
boolean process = true;
DocumentStatus status = DocumentStatus.NEW;
for (String docType : DocumentTypes.getDocumentTypes()) {
status = findDocumentStatus(docType, uri, sha1);
if (status == DocumentStatus.UPDATED) {
sb.append(" : modified ");
db.deleteContent(docType, uri);

} else if (status == DocumentStatus.IDENTICAL) {
sb.append(" : same ");
process = false;
}
if (!process) {
break;
}
}
if (DocumentStatus.NEW == status) {
sb.append(" : new ");
}
if (process) { // new or updated
crawlSourceFile(sourceFile, sha1, uri);
}
LOGGER.info("{}", sb);
}
if (sourceFile.isDirectory()) {
crawlFile(sourceFile);
} else if (sourceFile.isDirectory()) {
crawl(sourceFile);
}
}
}
}

private void crawlFile(File sourceFile) {

StringBuilder sb = new StringBuilder();
sb.append("Processing [").append(sourceFile.getPath()).append("]... ");
String sha1 = buildHash(sourceFile);
String uri = buildURI(sourceFile);
DocumentStatus status = findDocumentStatus(uri, sha1);
if (status == DocumentStatus.UPDATED) {
sb.append(" : modified ");
db.deleteContent(uri);
} else if (status == DocumentStatus.IDENTICAL) {
sb.append(" : same ");
} else if (DocumentStatus.NEW == status) {
sb.append(" : new ");
}

logger.info("{}", sb);

if (status != DocumentStatus.IDENTICAL) {
processSourceFile(sourceFile, sha1, uri);
}
}

/**
* Crawl all files and folders looking for data files.
*
@@ -148,10 +143,10 @@ private void crawlDataFiles(File path) {
boolean process = true;
DocumentStatus status = DocumentStatus.NEW;
String docType = config.getDataFileDocType();
status = findDocumentStatus(docType, uri, sha1);
status = findDocumentStatus(uri, sha1);
if (status == DocumentStatus.UPDATED) {
sb.append(" : modified ");
db.deleteContent(docType, uri);
db.deleteContent(uri);
} else if (status == DocumentStatus.IDENTICAL) {
sb.append(" : same ");
process = false;
@@ -165,7 +160,7 @@
if (process) { // new or updated
crawlDataFile(sourceFile, sha1, uri, docType);
}
LOGGER.info("{}", sb);
logger.info("{}", sb);
}
if (sourceFile.isDirectory()) {
crawlDataFiles(sourceFile);
@@ -179,7 +174,7 @@ private String buildHash(final File sourceFile) {
try {
sha1 = FileUtil.sha1(sourceFile);
} catch (Exception e) {
LOGGER.error("unable to build sha1 hash for source file '{}'", sourceFile);
logger.error("unable to build sha1 hash for source file '{}'", sourceFile);
sha1 = "";
}
return sha1;
@@ -197,7 +192,7 @@ private String buildURI(final File sourceFile) {

// strip off leading / to enable generating non-root based sites
if (uri.startsWith(FileUtil.URI_SEPARATOR_CHAR)) {
uri = uri.substring(1, uri.length());
uri = uri.substring(1);
}

return uri;
@@ -250,83 +245,74 @@ private boolean useNoExtensionUri(String uri) {

private void crawlDataFile(final File sourceFile, final String sha1, final String uri, final String documentType) {
try {
Map<String, Object> fileContents = parser.processFile(sourceFile);
if (fileContents != null) {
fileContents.put(String.valueOf(DocumentAttributes.SHA1), sha1);
fileContents.put(String.valueOf(DocumentAttributes.RENDERED), true);
fileContents.put(Attributes.FILE, sourceFile.getPath());
fileContents.put(String.valueOf(DocumentAttributes.SOURCE_URI), uri);

ODocument doc = new ODocument(documentType);
doc.fromMap(fileContents);
boolean cached = fileContents.get(String.valueOf(DocumentAttributes.CACHED)) != null ? Boolean.valueOf((String) fileContents.get(String.valueOf(DocumentAttributes.CACHED))) : true;
doc.field(String.valueOf(DocumentAttributes.CACHED), cached);
doc.save();
DocumentModel document = parser.processFile(sourceFile);
if (document != null) {
document.setSha1(sha1);
document.setRendered(true);
document.setFile(sourceFile.getPath());
document.setSourceUri(uri);
document.setType(documentType);

db.addDocument(document);
} else {
LOGGER.warn("{} couldn't be parsed so it has been ignored!", sourceFile);
logger.warn("{} couldn't be parsed so it has been ignored!", sourceFile);
}
} catch (Exception ex) {
throw new RuntimeException("Failed crawling file: " + sourceFile.getPath() + " " + ex.getMessage(), ex);
}
}

private void crawlSourceFile(final File sourceFile, final String sha1, final String uri) {
try {
Map<String, Object> fileContents = parser.processFile(sourceFile);
if (fileContents != null) {
fileContents.put(Attributes.ROOTPATH, getPathToRoot(sourceFile));
fileContents.put(String.valueOf(DocumentAttributes.SHA1), sha1);
fileContents.put(String.valueOf(DocumentAttributes.RENDERED), false);
if (fileContents.get(Attributes.TAGS) != null) {
// store them as a String[]
String[] tags = (String[]) fileContents.get(Attributes.TAGS);
fileContents.put(Attributes.TAGS, tags);
}
fileContents.put(Attributes.FILE, sourceFile.getPath());
fileContents.put(String.valueOf(DocumentAttributes.SOURCE_URI), uri);
fileContents.put(Attributes.URI, uri);

String documentType = (String) fileContents.get(Attributes.TYPE);
if (fileContents.get(Attributes.STATUS).equals(Status.PUBLISHED_DATE)) {
if (fileContents.get(Attributes.DATE) != null && (fileContents.get(Attributes.DATE) instanceof Date)) {
if (new Date().after((Date) fileContents.get(Attributes.DATE))) {
fileContents.put(Attributes.STATUS, Status.PUBLISHED);
}
}
}
private void processSourceFile(final File sourceFile, final String sha1, final String uri) {
DocumentModel document = parser.processFile(sourceFile);

if (config.getUriWithoutExtension()) {
fileContents.put(Attributes.NO_EXTENSION_URI, uri.replace("/index.html", "/"));
}
if (document != null) {
if (DocumentTypes.contains(document.getType())) {
addAdditionalDocumentAttributes(document, sourceFile, sha1, uri);

if (config.getImgPathUpdate()) {
// Prevent image source url's from breaking
HtmlUtil.fixImageSourceUrls(fileContents, config);
HtmlUtil.fixImageSourceUrls(document, config);
}

ODocument doc = new ODocument(documentType);
doc.fromMap(fileContents);
boolean cached = fileContents.get(String.valueOf(DocumentAttributes.CACHED)) != null ? Boolean.valueOf((String) fileContents.get(String.valueOf(DocumentAttributes.CACHED))) : true;
doc.field(String.valueOf(DocumentAttributes.CACHED), cached);
doc.save();
db.addDocument(document);
} else {
LOGGER.warn("{} has an invalid header, it has been ignored!", sourceFile);
logger.warn("{} has an unknown document type '{}' and has been ignored!", sourceFile, document.getType());
}
} catch (Exception ex) {
throw new RuntimeException("Failed crawling file: " + sourceFile.getPath() + " " + ex.getMessage(), ex);
} else {
logger.warn("{} has an invalid header, it has been ignored!", sourceFile);
}
}

private void addAdditionalDocumentAttributes(DocumentModel document, File sourceFile, String sha1, String uri) {
document.setRootPath(getPathToRoot(sourceFile));
document.setSha1(sha1);
document.setRendered(false);
document.setFile(sourceFile.getPath());
document.setSourceUri(uri);
document.setUri(uri);
document.setCached(true);

if (document.getStatus().equals(ModelAttributes.Status.PUBLISHED_DATE)
&& (document.getDate() != null)
&& new Date().after(document.getDate())) {
document.setStatus(ModelAttributes.Status.PUBLISHED);
}

if (config.getUriWithoutExtension()) {
document.setNoExtensionUri(uri.replace("/index.html", "/"));
}
}

private String getPathToRoot(File sourceFile) {
return FileUtil.getUriPathToContentRoot(config, sourceFile);
}

private DocumentStatus findDocumentStatus(String docType, String uri, String sha1) {
DocumentList match = db.getDocumentStatus(docType, uri);
private DocumentStatus findDocumentStatus(String uri, String sha1) {
DocumentList<DocumentModel> match = db.getDocumentStatus(uri);
if (!match.isEmpty()) {
Map entries = match.get(0);
String oldHash = (String) entries.get(String.valueOf(DocumentAttributes.SHA1));
if (!(oldHash.equals(sha1)) || Boolean.FALSE.equals(entries.get(String.valueOf(DocumentAttributes.RENDERED)))) {
DocumentModel document = match.get(0);
String oldHash = document.getSha1();
if (!oldHash.equals(sha1) || !document.getRendered()) {
return DocumentStatus.UPDATED;
} else {
return DocumentStatus.IDENTICAL;
@@ -336,41 +322,4 @@ private DocumentStatus findDocumentStatus(String docType, String uri, String sha
}
}

public abstract static class Attributes {

public static final String DATE = "date";
public static final String STATUS = "status";
public static final String TYPE = "type";
public static final String TITLE = "title";
public static final String URI = "uri";
public static final String FILE = "file";
public static final String TAGS = "tags";
public static final String TAG = "tag";
public static final String ROOTPATH = "rootpath";
public static final String ID = "id";
public static final String NO_EXTENSION_URI = "noExtensionUri";
public static final String ALLTAGS = "alltags";
public static final String PUBLISHED_DATE = "published_date";
public static final String BODY = "body";
public static final String DB = "db";
public static final String DATA = "data";

private Attributes() {
}

/**
* Possible values of the {@link Attributes#STATUS} property
*
* @author ndx
*/
public abstract static class Status {
public static final String PUBLISHED_DATE = "published-date";
public static final String PUBLISHED = "published";
public static final String DRAFT = "draft";

private Status() {
}
}

}
}
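
The Crawler refactor above replaces the raw ODocument/Map plumbing with the typed DocumentModel and a single findDocumentStatus(uri, sha1) lookup: a document is UPDATED when its stored sha1 differs from the file on disk or it has not been rendered yet, IDENTICAL otherwise, and NEW when nothing is stored for the uri. Reduced to plain Java, a self-contained sketch of that decision (the class, enum, and method names here are illustrative; only the logic mirrors the diff):

public class StatusSketch {

    enum DocumentStatus { NEW, UPDATED, IDENTICAL }

    // storedSha1      sha1 recorded for the uri on a previous bake, or null if never crawled
    // storedRendered  whether that stored document was already rendered
    // currentSha1     sha1 of the source file on disk now
    static DocumentStatus decide(String storedSha1, boolean storedRendered, String currentSha1) {
        if (storedSha1 == null) {
            return DocumentStatus.NEW;                          // nothing stored for this uri
        }
        if (!storedSha1.equals(currentSha1) || !storedRendered) {
            return DocumentStatus.UPDATED;                      // content changed or render is stale
        }
        return DocumentStatus.IDENTICAL;                        // skip, nothing to do
    }

    public static void main(String[] args) {
        System.out.println(decide(null, false, "abc"));  // NEW
        System.out.println(decide("abc", true, "def"));  // UPDATED (hash changed)
        System.out.println(decide("abc", false, "abc")); // UPDATED (not yet rendered)
        System.out.println(decide("abc", true, "abc"));  // IDENTICAL
    }
}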