Merge pull request #1 from cheslip/main
tekacs authored Oct 31, 2022
2 parents 552cb2d + 541e9e5 commit 76a81a8
Showing 7 changed files with 230 additions and 113 deletions.
156 changes: 52 additions & 104 deletions src/converters/logseq/index.ts
@@ -15,38 +15,12 @@ import {
isIndexWithinBackticks,
} from '../../utils/utils';
import { IConverter } from '../IConverter';
import {
getAttributeDefinitionsFromName as getAttributeDefinitionsFromName,
getValueForAttribute,
hasField,
hasImages,
dateStringToRoamDateUID,
dateStringToYMD,
} from '../common'
import {
isDone,
isTodo,
replaceRoamSyntax,
setNodeAsDone,
setNodeAsTodo,
} from './logseqUtils';
import { hasImages, dateStringToRoamDateUID, dateStringToYMD } from '../common';
import { hasDuplicateProperties, isDone, isTodo, replaceRoamSyntax, setNodeAsDone, setNodeAsTodo } from './logseqUtils';
import { LogseqBlock, LogseqFile } from './types';

const DATE_REGEX = /^\w+\s\d{1,2}\w{2},\s\d+$/;

type LogseqFile = {
version: number;
blocks: LogseqBlock[];
};

type LogseqBlock = {
id: string;
'page-name': string;
properties: Record<string, unknown>;
format: 'markdown';
children?: LogseqBlock[];
content: string;
};

export class LogseqConverter implements IConverter {
private nodesForImport: Map<string, TanaIntermediateNode> = new Map();
private originalNodeNames: Map<string, string> = new Map();
@@ -130,93 +104,55 @@ export class LogseqConverter implements IConverter {
return theMetaNode.children?.length || 0;
}

// converts "foo::bar bas::bam" into two fields with values
private convertToField(nodeWithField: TanaIntermediateNode, parentNode: TanaIntermediateNode) {
const fullNodeTitle = nodeWithField.name;

// if we have more fields this will be unset after each created field
let currentFieldNode: TanaIntermediateNode | undefined = nodeWithField;

const attributeDefinitions = getAttributeDefinitionsFromName(currentFieldNode.name);

if (!attributeDefinitions.length) {
return;
}

// we support foo::bar and bam::bim on the same line
for (const attrDef of attributeDefinitions) {
const currentFieldValues = [];

if (!currentFieldNode) {
// create a new field since we have multiple
currentFieldNode = this.createNodeForImport({
uid: idgenerator(),
name: attrDef,
createdAt: nodeWithField.createdAt,
editedAt: nodeWithField.editedAt,
});
if (parentNode && parentNode.children) {
parentNode.children.push(currentFieldNode);
}
} else {
currentFieldNode.name = attrDef;
}
currentFieldNode.type = 'field';

const attrValue = getValueForAttribute(attrDef, fullNodeTitle) || '';

const links = getBracketLinks(attrValue, false);
let remainingAttrValue = attrValue;
private convertToField(key: string, value: unknown, node: TanaIntermediateNode) {
this.summary.fields += 1;

for (const link of links) {
if (link.match(DATE_REGEX)) {
continue;
}
const fieldNode = this.createNodeForImport({
uid: idgenerator(),
name: key,
createdAt: node.createdAt,
editedAt: node.editedAt,
type: 'field',
});
const fieldChildren = [];

remainingAttrValue = remainingAttrValue.replace(`[[${link}]]`, '').trim();
}
node.children = node.children ? [...node.children, fieldNode] : [fieldNode];

const wasLinksOnly = remainingAttrValue.length === 0;

if (wasLinksOnly) {
// create node of type field, add values as children
for (const link of links) {
currentFieldValues.push(
this.createNodeForImport({
uid: idgenerator(),
name: `[[${link}]]`, // We link to [[Peter Pan]] etc. It should be found by broken refs later
createdAt: currentFieldNode.createdAt,
editedAt: currentFieldNode.editedAt,
parentNode: currentFieldNode.uid,
}),
);
}
} else {
currentFieldValues.push(
// arrays as property values are references
if (Array.isArray(value)) {
for (const link of value) {
fieldChildren.push(
this.createNodeForImport({
uid: idgenerator(),
name: attrValue,
createdAt: currentFieldNode.createdAt,
editedAt: currentFieldNode.editedAt,
parentNode: currentFieldNode.uid,
name: `[[${link}]]`, // We link to [[Peter Pan]] etc. It should be found by broken refs later
createdAt: fieldNode.createdAt,
editedAt: fieldNode.editedAt,
parentNode: fieldNode.uid,
}),
);
}
}

if (!currentFieldNode.children) {
currentFieldNode.children = [];
}
for (const f of currentFieldValues) {
currentFieldNode.children.push(f);
}
if (typeof value === 'string' || typeof value === 'number') {
fieldChildren.push(
this.createNodeForImport({
uid: idgenerator(),
name: value.toString(),
createdAt: node.createdAt,
editedAt: node.editedAt,
}),
);
}

this.ensureAttrMapIsUpdated(currentFieldNode);
if (!parentNode) {
throw new Error('Cannot create fields without a parent node');
}
if (!fieldNode.children) {
fieldNode.children = [];
}

currentFieldNode = undefined;
for (const child of fieldChildren) {
fieldNode.children.push(child);
}

this.ensureAttrMapIsUpdated(fieldNode);
}

private createNodeForImport(n: {
@@ -388,6 +324,18 @@ export class LogseqConverter implements IConverter {
this.originalNodeNames.set(node.id, intermediateNode.name);
this.nodesForImport.set(node.id, intermediateNode);

// convert Logseq properties to Tana fields
if (node.properties) {
if (node['page-name'] && hasDuplicateProperties(node, node.children?.[0])) {
// logseq properties appear to be duplicated in the page node and the first child node
// if properties are equal, remove first child
node.children?.shift();
}
for (const [key, value] of Object.entries(node.properties)) {
this.convertToField(key, value, intermediateNode);
}
}

// import any children

if (node.children) {
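For reference, here is a minimal TypeScript sketch of the property-to-field mapping that the new `convertToField` implements: array values become `[[link]]` children, while string and number values become a single literal child. The `Node` type and `idgen` helper below are simplified stand-ins for the converter's `TanaIntermediateNode` and `idgenerator`, not the real ones.

```ts
// Simplified stand-ins for TanaIntermediateNode and idgenerator().
type Node = {
  uid: string;
  name: string;
  type?: 'field';
  children?: Node[];
};

let nextId = 0;
const idgen = (): string => `uid-${++nextId}`;

// Arrays of property values are treated as references ([[link]] children);
// plain strings and numbers become a single literal child.
function propertyToField(key: string, value: unknown): Node {
  const field: Node = { uid: idgen(), name: key, type: 'field', children: [] };
  if (Array.isArray(value)) {
    for (const link of value) {
      field.children!.push({ uid: idgen(), name: `[[${link}]]` });
    }
  } else if (typeof value === 'string' || typeof value === 'number') {
    field.children!.push({ uid: idgen(), name: value.toString() });
  }
  return field;
}

// Example input taken from the fields.json fixture below:
// properties: { "refs": ["Test page 1", "Test page 2"], "num": 3 }
console.log(propertyToField('refs', ['Test page 1', 'Test page 2']));
console.log(propertyToField('num', 3));
```

Run against the fixture pages, this yields one field node per property key, with referenced page names wrapped in `[[...]]` so they can be resolved by the broken-references pass later.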
9 changes: 9 additions & 0 deletions src/converters/logseq/logseqUtils.ts
@@ -1,4 +1,5 @@
import { TanaIntermediateNode } from '../../types/types';
import { LogseqBlock } from './types';

// TODO and DONE in roam
const TODO_FLAG = 'TODO';
@@ -38,3 +39,11 @@ export function replaceRoamSyntax(nameToUse: string) {
}
return nameToUse;
}

export function hasDuplicateProperties(parent?: LogseqBlock, child?: LogseqBlock) {
if (!parent || !child) {
return false;
}

return JSON.stringify(parent.properties) === JSON.stringify(child.properties);
}
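This check pairs with the `index.ts` hunk above: when a page block and its first child carry identical `properties`, the child is dropped before conversion. A rough usage sketch, with `LogseqBlock` reduced to just the fields the comparison touches:

```ts
// Reduced shape: only the fields the duplicate check looks at.
type Block = {
  properties?: Record<string, unknown> | null;
  children?: Block[];
};

// JSON.stringify comparison is key-order-sensitive; this assumes Logseq
// serializes the page's properties and the duplicated child identically.
const sameProperties = (a?: Block, b?: Block): boolean =>
  !!a && !!b && JSON.stringify(a.properties) === JSON.stringify(b.properties);

const page: Block = {
  properties: { test: 'Value', num: '1' },
  children: [{ properties: { test: 'Value', num: '1' } }, { properties: null }],
};

if (sameProperties(page, page.children?.[0])) {
  page.children?.shift(); // drop the duplicated first child, mirroring index.ts
}
```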
59 changes: 59 additions & 0 deletions src/converters/logseq/tests/fixtures/fields.json
@@ -0,0 +1,59 @@
{
"version": 1,
"blocks": [
{
"id": "page1",
"page-name": "Test page 1",
"format": "markdown",
"properties": { "test": "Value", "num": "1" },
"children": [
{
"id": "dupeProps1",
"format": "markdown",
"properties": { "test": "Value", "num": "1" },
"children": []
},
{
"id": "child1",
"format": "markdown",
"children": [],
"content": "[[Test page 2]]"
}
]
},
{
"id": "page2",
"page-name": "Test page 2",
"format": "markdown",
"properties": { "refs": "[[Test page 1]]" },
"children": [
{
"id": "dupeProps2",
"format": "markdown",
"properties": { "refs": "[[Test page 1]]" },
"children": []
},
{
"id": "child2",
"format": "markdown",
"children": [],
"content": "test"
}
]
},
{
"id": "page3",
"page-name": "Test page 3",
"format": "markdown",
"children": [
{
"id": "blockAttrs",
"format": "markdown",
"content": "I have attributes",
"properties": { "refs": ["Test page 1", "Test page 2"], "num": 3 },
"children": []
}
]
}
]
}
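The fixture exercises both branches of the conversion: plain string values ("test", "num" on page 1) and array values plus a numeric value (the block-level properties on page 3). Against it, the `test: "Value"` property on `Test page 1` should come out as roughly the following field node; the `uid` values are illustrative placeholders, since the real ones come from `idgenerator()` at conversion time.

```ts
// Illustrative shape only, not captured test output.
const expectedTestField = {
  uid: '<generated>',
  name: 'test',
  type: 'field',
  children: [{ uid: '<generated>', name: 'Value' }],
};
```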
66 changes: 64 additions & 2 deletions src/converters/logseq/tests/fixtures/smoketest.json
@@ -2,7 +2,7 @@
"version": 1,
"blocks": [
{
"id": "633f88a0-0d2b-4684-9421-09a72d880203",
"id": "51d9a8f0-5975-4794-a949-8697a479a441",
"page-name": "Contents",
"properties": null,
"children": [
@@ -61,6 +61,13 @@
"properties": null,
"format": "markdown",
"children": [],
"content": "[[Attributes]]"
},
{
"id": "634a0deb-12ff-42e8-81d9-99c46d607411",
"properties": null,
"format": "markdown",
"children": [],
"content": ""
}
]
@@ -134,7 +141,7 @@
"properties": null,
"format": "markdown",
"children": [],
"content": "![](https://images.unsplash.com/photo-1665049626763-4462ffb002a1?ixlib=rb-1.2.1&ixid=MnwxMjA3fDB8MHxlZGl0b3JpYWwtZmVlZHwyfHx8ZW58MHx8fHw%3D&auto=format&fit=crop&w=500&q=60)"
"content": "![](https://images.unsplash.com/photo-1665049626763-4462ffb002a1?ixlib=rb-1.2.1&ixid=MnwxMjA3fDB8MHxlZGl0b3JpYWwtZmVlZHwyfHx8ZW58MHx8fHw%3D&auto=format&fit=crop&w=500&q=60) test"
},
{
"id": "633fb2ef-eedd-476c-b568-5a05527fcbbe",
@@ -144,6 +151,61 @@
"content": ""
}
]
},
{
"id": "634a0de9-7e56-4d73-973e-ef0b3d2da3df",
"page-name": "Attributes",
"properties": {
"ref": ["John Smith"],
"refs": ["John Smith", "Tana test"],
"number": 3,
"text": "Testing",
"tags": ["one", "two", "three"]
},
"format": "markdown",
"children": [
{
"id": "634a0ded-368e-47a6-8473-fcf8be6a9dbd",
"properties": {
"ref": ["John Smith"],
"refs": ["John Smith", "Tana test"],
"number": 3,
"text": "Testing",
"tags": ["one", "two", "three"]
},
"format": "markdown",
"children": [],
"content": ""
},
{
"id": "63531cd7-fcad-4cc7-ab76-c544f8c23ba2",
"properties": null,
"format": "markdown",
"children": [],
"content": ""
},
{
"id": "63531cdb-1a87-4669-bb3f-12a9d179026c",
"properties": { "number": 5, "ref": ["John Smith"], "tags": ["one", "two three"] },
"format": "markdown",
"children": [],
"content": "block-level attributes"
}
]
},
{
"id": "63531be2-4649-474f-8d2f-0891848a1b16",
"page-name": "John Smith",
"properties": { "alias": ["John"], "text": "testing" },
"children": [
{
"id": "63531be2-8a49-4ce3-b8da-472dcb7ba816",
"properties": { "alias": ["John"], "text": "testing" },
"format": "markdown",
"children": [],
"content": ""
}
]
}
]
}
