Support Array of ParseFiles #65

Open · wants to merge 5 commits into master
Changes from all commits
11 changes: 11 additions & 0 deletions config.example.js
@@ -8,9 +8,20 @@ module.exports = {
mongoURL: "mongodb://<username>:<password>@mongourl.com:27017/database_name",
serverURL: "https://api.customparseserver.com/parse",
filesToTransfer: 'parseOnly',
renameFiles: false,
renameInDatabase: false,
transferTo: 'filesystem',

// If false, file arrays and string file references (see extraFields below) are migrated as well
// Note: renameFiles and renameInDatabase must both be set to true
onlyFiles: false,

// Extra collections and fields that hold file arrays or string file references, listed so they are migrated as well
// Note: only used when onlyFiles is false
extraFields: {
collectionName: ['fieldNameOne', 'fieldNameTwo']
},

// For filesystem configuration
filesystemPath: './downloaded_files',

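As an illustration, a project that keeps an array of ParseFiles in a hypothetical Post.attachments column could enable the new options like this (the collection and field names are placeholders, not part of this PR):

module.exports = {
  // ...connection settings (mongoURL, serverURL, etc.) as in config.example.js...
  filesToTransfer: 'parseOnly',
  renameFiles: true,        // must be true for the array/string migration
  renameInDatabase: true,   // must be true for the array/string migration
  transferTo: 'filesystem',
  filesystemPath: './downloaded_files',
  onlyFiles: false,         // also migrate the fields listed in extraFields
  extraFields: {
    Post: ['attachments']   // placeholder collection/field holding an array of ParseFiles
  }
};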
108 changes: 87 additions & 21 deletions lib/index.js
@@ -5,6 +5,8 @@ var schemas = require('./schemas');
var transfer = require('./transfer');
var questions = require('./questions.js');

var stringFileRegex = new RegExp(/^(?=.*\bhttp\b)(?=.*\bparsetfss\b)(?=.*\btfss-\b).*$/);
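// The regex above is a heuristic for legacy Parse-hosted file URLs stored as plain strings:
// it only matches values that contain "http", "parsetfss", and a "tfss-" file name prefix.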

module.exports = initialize;

function initialize(config) {
@@ -26,7 +28,7 @@ function initialize(config) {
Parse.serverURL = config.serverURL;
return transfer.init(config);
}).then(function() {
return getAllFileObjects(config);
}).then(function(objects) {
return transfer.run(objects);
}).then(function() {
@@ -38,11 +40,15 @@
});
}

function getAllFileObjects(config) {
console.log("Fetching schema...");
return schemas.get().then(function(res){
console.log("Fetching all objects with files...");
if (config.onlyFiles){
Contributor: could we do this feature 'additive' instead of file or array? Like just files, or files + arrays?

var schemasWithFiles = onlyFiles(res);
} else {
var schemasWithFiles = stringAndFilesArray(res, config.extraFields);
}
return Promise.all(schemasWithFiles.map(getObjectsWithFilesFromSchema));
}).then(function(results) {
var files = results.reduce(function(c, r) {
@@ -57,16 +63,40 @@

function onlyFiles(schemas) {
return schemas.map(function(schema) {
var fileFields = Object.keys(schema.fields).filter(function(key){
var value = schema.fields[key];
return value.type == "File";
});
if (fileFields.length > 0) {
return {
className: schema.className,
fields: fileFields
}
}
}).filter(function(s){ return s != undefined })
}

function stringAndFilesArray(schemas, extra) {
return schemas.map(function(schema) {
var fileFields = Object.keys(schema.fields).filter(function(key){
var value = schema.fields[key];

var is_valid = false;

if (extra[schema.className] != undefined){
if (extra[schema.className].indexOf(key) != -1){
is_valid = true;
}
}

return is_valid;
});
if (fileFields.length > 0) {
return {
className: schema.className,
fields: fileFields
}
}
}).filter(function(s){ return s != undefined })
}
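Regarding the reviewer's question above about making the feature additive, one possible approach (a sketch only, not part of this PR) is to concatenate the schemas returned by both helpers instead of choosing one or the other:

// Sketch: an 'additive' variant that collects plain File columns and the extra
// array/string fields in one pass. Deduplication by className is omitted for brevity.
function filesAndExtras(schemas, extraFields) {
  var fromFiles = onlyFiles(schemas);
  var fromExtras = stringAndFilesArray(schemas, extraFields || {});
  return fromFiles.concat(fromExtras);
}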

@@ -95,25 +125,61 @@ function getObjectsWithFilesFromSchema(schema) {
query.limit(1000);

var checks = schema.fields.map(function(field) {
return new Parse.Query(schema.className).exists(field);
});
query._orQuery(checks);

return getAllObjects(query).then(function(results) {
return results.reduce(function(current, result){
var fileResults = [];
schema.fields.map(function(field){
if (Array.isArray(result.get(field))) {
for (var i = 0; i < result.get(field).length; i++) {
if (result.get(field)[i].name !== undefined){
var fName = result.get(field)[i] ? result.get(field)[i].name() : 'DELETE';
var fUrl = result.get(field)[i] ? result.get(field)[i].url() : 'DELETE';

fileResults.push({
className: schema.className,
objectId: result.id,
fieldName: field,
fileName: fName,
url: fUrl,
i: i
});
}
}
} else if ((typeof result.get(field) === 'string') && (stringFileRegex.test(result.get(field)))){
var fUrl = result.get(field) ? result.get(field) : 'DELETE';
if (fUrl != 'DELETE') {
var splitString = fUrl.split("/");
var fName = splitString[splitString.length-1];
} else {
var fName = 'DELETE';
}
fileResults.push({
className: schema.className,
objectId: result.id,
fieldName: field,
fileName: fName,
url: fUrl,
i: 0,
type: 'string'
})
} else if (typeof result.get(field) != 'string') {
var fName = result.get(field) ? result.get(field).name() : 'DELETE';
var fUrl = result.get(field) ? result.get(field).url() : 'DELETE';
fileResults.push({
className: schema.className,
objectId: result.id,
fieldName: field,
fileName: fName,
url: fUrl,
i: 0
})
}
});
return current.concat(fileResults);
}, []);
});
}
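For reference, each entry that getAllFileObjects collects is a plain descriptor; the array and string branches above produce shapes like these (all values below are illustrative placeholders):

// From the array branch: i is the position of the file inside the array field.
var exampleArrayEntry = {
  className: 'Post',
  objectId: 'a1b2c3d4e5',
  fieldName: 'attachments',
  fileName: 'tfss-1234-photo.jpg',
  url: 'http://files.parsetfss.com/app-id/tfss-1234-photo.jpg',
  i: 2
};

// From the string branch: the raw URL is kept and marked with type: 'string',
// so _changeDBFileField knows to write back a plain string rather than a File object.
var exampleStringEntry = {
  className: 'Post',
  objectId: 'a1b2c3d4e5',
  fieldName: 'legacyPhoto',
  fileName: 'tfss-1234-photo.jpg',
  url: 'http://files.parsetfss.com/app-id/tfss-1234-photo.jpg',
  i: 0,
  type: 'string'
};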
76 changes: 65 additions & 11 deletions lib/transfer.js
@@ -131,7 +131,7 @@ function _processFiles(files, handler) {
var asyncLimit = config.asyncLimit || 5;
return new Promise(function(resolve, reject) {
async.eachOfLimit(files, asyncLimit, function(file, index, callback) {
process.stdout.write('Processing '+(index+1)+'/'+files.length+'\r');

Contributor: oops!

file.newFileName = _createNewFileName(file.fileName);
if (_shouldTransferFile(file)) {
_transferFile(file).then(callback, callback);
@@ -156,18 +156,72 @@ function _changeDBFileField(file) {
if (file.fileName == file.newFileName || !config.renameInDatabase) {
return resolve();
}

if (config.transferTo === 'filesystem'){
var _path = config.filesAdapter._filesDir + "/" + file.newFileName;
} else if (config.transferTo === 's3'){
var _path = "https://" + config.filesAdapter._bucket + "." + config.filesAdapter._s3Client.config.endpoint + "/" + config.filesAdapter._bucketPrefix + file.newFileName;
Contributor: there should be getFileLocation https://github.com/parse-server-modules/parse-server-s3-adapter/blob/master/index.js#L122 available from the S3 adapter, can we use that instead?

}

var update = {$set:{}};

if (config.extraFields && (config.extraFields[file.className] != undefined) && (config.extraFields[file.className].indexOf(file.fieldName) != -1)) {

if (file.type === 'string') {
update.$set[file.fieldName] = _path;
db.collection(file.className).update(
{ _id : file.objectId },
update,
function(error, result) {
if (error) {
return reject(error);
}
resolve();
}
)
} else {
db.collection(file.className).findOne({_id: file.objectId}).then(function(result){
var images = result[file.fieldName];

images[file.i] = {
__type: 'File',
name: file.newFileName,
url: _path
};

update.$set[file.fieldName] = images;
db.collection(file.className).update(
{ _id : file.objectId },
update,
function(error, result) {
if (error) {
return reject(error);
}
resolve();
}
);
});
}

} else {

update.$set[file.fieldName] = {
__type: 'File',
name: file.newFileName,
url: _path
};

db.collection(file.className).update(
{ _id : file.objectId },
update,
function(error, result ) {
if (error) {
return reject(error);
}
resolve();
}
);
}
});
}

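Following up on the reviewer's suggestion above, the S3 adapter exposes getFileLocation(config, filename); a sketch of how the new-path computation in _changeDBFileField could use it instead of hand-building the URL is shown below. Here serverConfig is an assumed placeholder for a Parse Server config object (mount, applicationId), which the adapter only consults when direct access is disabled.

// Sketch only, not part of this PR.
if (config.transferTo === 'filesystem') {
  var _path = config.filesAdapter._filesDir + "/" + file.newFileName;
} else if (config.transferTo === 's3' && typeof config.filesAdapter.getFileLocation === 'function') {
  // Let the adapter decide the final URL (bucket, prefix, baseUrl, direct access, etc.)
  var _path = config.filesAdapter.getFileLocation(serverConfig, file.newFileName);
}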