Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
25 commits
Select commit — hold shift + click to select a range
a276421
WIP #1245 add storage abstraction for different backends
brollb Oct 9, 2019
df00059
WIP Update pipeline seed to use userAsset attribute type
brollb Oct 9, 2019
b903936
WIP Update ImportArtifact to use Storage adapter
brollb Oct 9, 2019
290fde5
WIP Add migration for data nodes
brollb Oct 9, 2019
b7c2869
WIP Add getMetadata, getDownloadURL
brollb Oct 9, 2019
b970205
WIP Add storage location to ImportArtifact plugin
brollb Oct 9, 2019
01f7508
WIP Updated createDataInfo to new method sig
brollb Oct 9, 2019
6140afb
Merge branch 'master' into 1245-custom-storage-backends
brollb Oct 9, 2019
9014cee
WIP #1245 update GenerateJob and generated files
brollb Oct 10, 2019
80872f4
WIP #1245 Update ArtifactIndex
brollb Oct 10, 2019
4154330
WIP Upload data to storage location from the worker
brollb Oct 10, 2019
b6c5a25
WIP #1245 Updated worker cache (restoring useBlob fn-ality)
brollb Oct 10, 2019
4924756
WIP #1245 Only create results.json on successful completion
brollb Oct 10, 2019
c9aedec
WIP #1245 Update Export plugin
brollb Oct 10, 2019
934d987
Merge branch 'master' into 1245-custom-storage-backends
brollb Oct 11, 2019
f08a9e8
WIP switched arg order for Storage
brollb Oct 11, 2019
9929ff7
WIP #1245 update export branch functionality to collect correct assets
brollb Oct 11, 2019
18cd483
WIP Set userAsset hashes correctly
brollb Oct 11, 2019
a97fdb4
WIP Remove in-memory storage backend
brollb Oct 11, 2019
081e439
WIP Remove unused method
brollb Oct 11, 2019
fd06433
WIP Add tests for ExportBranch
brollb Oct 11, 2019
8d3ece5
WIP add default config for Storage backend
brollb Oct 11, 2019
09d9521
WIP Remove extra comments and console.logs
brollb Oct 11, 2019
5146255
WIP Only check pipeline library version if it exists
brollb Oct 11, 2019
20ed000
WIP addressing remaining TODOs and FIXMEs
brollb Oct 11, 2019
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 0 additions & 7 deletions bin/deepforge
Original file line number Diff line number Diff line change
Expand Up @@ -102,13 +102,6 @@ var storeConfig = function(id, value) {
var cKey = envToConf[env];
process.env[env] = process.env[env] || p(getConfigValue(cKey));
});

// Special cases
if (process.env.DEEPFORGE_WORKER_USE_BLOB === 'true' &&
exists.sync(process.env.DEEPFORGE_BLOB_DIR)) {

process.env.DEEPFORGE_WORKER_CACHE = process.env.DEEPFORGE_BLOB_DIR + '/wg-content';
}
})();

program
Expand Down
46 changes: 35 additions & 11 deletions bin/start-worker.js
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,9 @@ var path = require('path'),
config = {};

var createDir = function(dir) {
if (path.dirname(dir) !== dir) {
createDir(path.dirname(dir));
}
try {
fs.statSync(dir);
} catch (e) {
Expand All @@ -25,18 +28,20 @@ var createDir = function(dir) {
}
return false;
};
createDir(workerRootPath);
createDir(workerPath);

// Create a symbolic link at `link` pointing to `origin`, unless something
// already exists at `link` (stat succeeds), in which case do nothing.
const symlink = function(origin, link) {
    const linkExists = (() => {
        try {
            fs.statSync(link);
            return true;
        } catch (e) {
            return false;
        }
    })();

    if (!linkExists) {
        childProcess.spawnSync('ln', ['-s', origin, link]);
    }
};

createDir(workerTmp);

// Create sym link to the node_modules
var modules = path.join(workerRootPath, 'node_modules');
try {
fs.statSync(modules);
} catch (e) {
// Create dir
childProcess.spawnSync('ln', ['-s', `${__dirname}/../node_modules`, modules]);
}
// Create sym link to the node_modules and to deepforge
const modules = path.join(workerRootPath, 'node_modules');
symlink(`${__dirname}/../node_modules`, modules);

var cleanUp = function() {
console.log('removing worker directory ', workerPath);
Expand All @@ -47,12 +52,31 @@ var startExecutor = function() {
process.on('SIGINT', cleanUp);
process.on('uncaughtException', cleanUp);

// Configure the cache
const blobDir = process.env.DEEPFORGE_BLOB_DIR;
const isSharingBlob = process.env.DEEPFORGE_WORKER_USE_BLOB === 'true' &&
!!blobDir;

if (process.env.DEEPFORGE_WORKER_CACHE && isSharingBlob) {
// Create the cache directory and symlink the blob in cache/gme
createDir(process.env.DEEPFORGE_WORKER_CACHE);

const blobContentDir = path.join(blobDir, 'wg-content');
const gmeStorageCache = path.join(process.env.DEEPFORGE_WORKER_CACHE, 'gme');
rm_rf.sync(gmeStorageCache);
symlink(blobContentDir, gmeStorageCache);
}

// Start the executor
const env = Object.assign({}, process.env);
env.DEEPFORGE_ROOT = path.join(__dirname, '..');

const options = {env: env};
var execJob = spawn('node', [
executorSrc,
workerConfigPath,
workerTmp
]);
], options);
execJob.stdout.pipe(process.stdout);
execJob.stderr.pipe(process.stderr);
};
Expand Down
3 changes: 3 additions & 0 deletions config/components.json
Original file line number Diff line number Diff line change
Expand Up @@ -132,5 +132,8 @@
},
"Compute": {
"backends": ["gme", "local"]
},
"Storage": {
"backends": ["gme"]
}
}
75 changes: 37 additions & 38 deletions src/common/globals.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
/* globals WebGMEGlobal, define*/
// This file creates the DeepForge namespace and defines basic actions
define([
'deepforge/storage/index',
'deepforge/viz/ConfigDialog',
'panel/FloatingActionButton/styles/Materialize',
'text!./NewOperationCode.ejs',
'js/RegistryKeys',
Expand All @@ -9,6 +11,8 @@ define([
'underscore',
'q'
], function(
Storage,
ConfigDialog,
Materialize,
DefaultCodeTpl,
REGISTRY_KEYS,
Expand Down Expand Up @@ -224,43 +228,6 @@ define([
});
};

// Creating Artifacts
// NOTE(review): legacy implementation removed by this PR in favor of the
// ConfigDialog-based DeepForge.create.Artifact below.
var UPLOAD_PLUGIN = 'ImportArtifact';

// Launch the ImportArtifact plugin via the InterpreterManager and toast the result.
var uploadArtifact = function() {
// Get the data types
var dataBase,
dataBaseId,
metanodes = client.getAllMetaNodes(),
dataTypes = [];

// Find the base "Data" meta node; everything uploadable derives from it.
dataBase = metanodes.find(n => n.getAttribute('name') === 'Data');

if (!dataBase) {
this.logger.error('Could not find the base Data node!');
return;
}

// Collect the names of all concrete (non-abstract) Data subtypes.
// NOTE(review): dataTypes is computed here but never used below — TODO confirm
dataBaseId = dataBase.getId();
dataTypes = metanodes.filter(n => n.isTypeOf(dataBaseId))
.filter(n => !n.getRegistry('isAbstract'))
.map(node => node.getAttribute('name'));

// Add the target type to the pluginMetadata...
var metadata = WebGMEGlobal.allPluginsMetadata[UPLOAD_PLUGIN];

// Run the plugin; `result` is falsy when the dialog was cancelled.
WebGMEGlobal.InterpreterManager.configureAndRun(metadata, result => {
var msg = 'Artifact upload complete!';
if (!result) {
return;
}
if (!result.success) {
msg = `Artifact upload failed: ${result.error}`;
}
Materialize.toast(msg, 2000);
});
};

DeepForge.last = {};
DeepForge.create = {};
DeepForge.register = {};
Expand All @@ -280,7 +247,39 @@ define([
};
});

DeepForge.create.Artifact = uploadArtifact;
// Creating Artifacts
const UPLOAD_PLUGIN = 'ImportArtifact';
const copy = data => JSON.parse(JSON.stringify(data));
DeepForge.create.Artifact = async function() {
const metadata = copy(WebGMEGlobal.allPluginsMetadata[UPLOAD_PLUGIN]);

metadata.configStructure.unshift({
name: 'artifactOptions',
displayName: 'New Artifact',
valueType: 'section'
});

if (Storage.getAvailableBackends().length > 1) {
metadata.configStructure.push({
name: 'storageOptions',
displayName: 'Storage',
valueType: 'section'
});
metadata.configStructure.push({
name: 'storage',
displayName: 'Storage Location',
valueType: 'string',
valueItems: Storage.getAvailableBackends(),
value: Storage.getAvailableBackends()[0],
});
}

const configDialog = new ConfigDialog(client);
const allConfigs = await configDialog.show(metadata);
const context = client.getCurrentPluginContext(UPLOAD_PLUGIN);
context.pluginConfig = allConfigs[UPLOAD_PLUGIN];
return await Q.ninvoke(client, 'runBrowserPlugin', UPLOAD_PLUGIN, context);
};

//////////////////// DeepForge prev locations ////////////////////
// Update DeepForge on project changed
Expand Down
76 changes: 76 additions & 0 deletions src/common/plugin/GeneratedFiles.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
/*globals define*/
// Container that tracks generated text files plus references to user assets,
// and can bundle them all into a single blob artifact.
define([
    'common/util/assert',
    'deepforge/storage/index',
], function(
    assert,
    Storage,
) {

    const GeneratedFiles = function(blobClient) {
        this.blobClient = blobClient;
        this._files = {};  // path -> file contents
        this._data = {};   // path -> dataInfo describing a stored user asset
    };

    // Register a user asset under `path`. `dataInfo` may be the object itself
    // or its JSON-serialized form.
    GeneratedFiles.prototype.addUserAsset = function (path, dataInfo) {
        assert(!!dataInfo, `Adding undefined user asset: ${path}`);
        const info = typeof dataInfo === 'object' ? dataInfo : JSON.parse(dataInfo);
        this._data[path] = info;
    };

    // All paths under which user assets have been registered.
    GeneratedFiles.prototype.getUserAssetPaths = function () {
        return Object.keys(this._data);
    };

    // The dataInfo registered at `path` (undefined when absent).
    GeneratedFiles.prototype.getUserAsset = function (path) {
        return this._data[path];
    };

    // [path, dataInfo] pairs for every registered user asset.
    GeneratedFiles.prototype.getUserAssets = function () {
        return Object.entries(this._data);
    };

    // Set (overwrite) the contents of the text file at `path`.
    GeneratedFiles.prototype.addFile = function (path, contents) {
        this._files[path] = contents;
    };

    // Append to the text file at `path`, creating it when missing.
    GeneratedFiles.prototype.appendToFile = function (path, contents) {
        const existing = this._files[path] || '';
        this._files[path] = existing + contents;
    };

    GeneratedFiles.prototype.getFile = function (path) {
        return this._files[path];
    };

    GeneratedFiles.prototype.getFilePaths = function () {
        return Object.keys(this._files);
    };

    // Drop any file or user asset registered at `path`.
    GeneratedFiles.prototype.remove = function (path) {
        delete this._files[path];
        delete this._data[path];
    };

    // Download every user asset from its storage backend, stage it in the
    // blob alongside the plain files, and save the bundle as `artifactName`.
    GeneratedFiles.prototype.save = async function (artifactName) {
        const artifact = this.blobClient.createArtifact(artifactName);

        const assets = this.getUserAssets();
        if (assets.length > 0) {
            const objectHashes = {};
            for (const [filepath, dataInfo] of assets.slice().reverse()) {
                const contents = await Storage.getFile(dataInfo);
                const basename = filepath.split('/').pop();
                objectHashes[filepath] = await this.blobClient.putFile(basename, contents);
            }
            await artifact.addObjectHashes(objectHashes);
        }
        await artifact.addFiles(this._files);
        return await artifact.save();
    };

    return GeneratedFiles;
});
33 changes: 33 additions & 0 deletions src/common/storage/backends/StorageBackend.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
/* globals define */
// Descriptor for a single storage backend (e.g. "gme"); lazily loads the
// backend's client implementation on demand.
define([
    'module',
    'q',
], function(
    module,  // NOTE(review): 'module' appears unused — confirm before removing
    Q,
) {

    // `id` is the backend identifier; `metadata` supplies the display name
    // and (optionally) the relative path of the client module.
    const StorageBackend = function(id, metadata) {
        const {name, client} = metadata;
        this.id = id;
        this.name = name;
        this.clientPath = client || './Client';
    };

    // Asynchronously load this backend's client module and instantiate it.
    StorageBackend.prototype.getClient = async function(logger, config) {
        const Client = await this.require(`deepforge/storage/backends/${this.id}/${this.clientPath}`);
        return new Client(this.id, this.name, logger, config);
    };

    // Promise wrapper around AMD's asynchronous require.
    StorageBackend.prototype.require = function(path) {
        const deferred = Q.defer();
        require([path], deferred.resolve, deferred.reject);
        return deferred.promise;
    };

    return StorageBackend;
});
44 changes: 44 additions & 0 deletions src/common/storage/backends/StorageClient.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
/* globals define, WebGMEGlobal */
// Abstract base for storage clients. Concrete backends override the async
// methods below; the defaults throw "not implemented" errors.
define([
    'client/logger'
], function(
    Logger
) {
    // `logger` is optional; when omitted, one is created from the gme config
    // (browser: WebGMEGlobal.gmeConfig, node: the project config file).
    const StorageClient = function(id, name, logger) {
        this.id = id;
        this.name = name;
        if (!logger) {
            let gmeConfig;
            if (require.isBrowser) {
                gmeConfig = WebGMEGlobal.gmeConfig;
            } else {
                gmeConfig = require.nodeRequire('../../../../config');
            }
            logger = Logger.create(`gme:storage:${id}`, gmeConfig.client.log);
        }
        this.logger = logger.fork(`storage:${id}`);
    };

    StorageClient.prototype.getFile = async function() {
        throw new Error(`File download not implemented for ${this.name}`);
    };

    StorageClient.prototype.putFile = async function() {
        throw new Error(`File upload not supported by ${this.name}`);
    };

    StorageClient.prototype.getDownloadURL = async function() {
        // TODO: Remove this in favor of directly downloading w/ getFile, etc
        throw new Error(`getDownloadURL not implemented for ${this.name}`);
    };

    StorageClient.prototype.getMetadata = async function() {
        // Fixed copy-paste bug: message previously said "getDownloadURL".
        throw new Error(`getMetadata not implemented for ${this.name}`);
    };

    // Wrap backend-specific data in a dataInfo record tagged with this
    // backend's id, so it can be routed back to the right client later.
    StorageClient.prototype.createDataInfo = function(data) {
        return {backend: this.id, data};
    };

    return StorageClient;
});
Loading