2006-04-17 22:29:29 +02:00
|
|
|
import gtk
|
2006-04-25 11:53:35 +02:00
|
|
|
from system import dataset, logger, plots, workflow
|
2006-04-20 12:27:58 +02:00
|
|
|
#import geneontology
|
|
|
|
#import gostat
|
2006-04-24 16:52:21 +02:00
|
|
|
from scipy import array,randn,log
|
2006-04-22 18:27:33 +02:00
|
|
|
import cPickle
|
2006-04-17 22:29:29 +02:00
|
|
|
|
2006-07-21 16:30:09 +02:00
|
|
|
class TestWorkflow(workflow.Workflow):
    """General-purpose testing workflow.

    Wires together the loading, preprocessing, Gene Ontology, regression,
    exploration and saving stages used while testing the application.
    """

    name = 'Test Workflow'
    ident = 'test'
    description = 'Test Gene Ontology Workflow. This workflow currently serves as a general testing workflow.'

    def __init__(self, app):
        workflow.Workflow.__init__(self, app)

        # Data-loading alternatives.
        loading = workflow.Stage('load', 'Load Data')
        loading.add_function(CelFileImportFunction())
        loading.add_function(TestDataFunction())
        loading.add_function(DatasetLoadFunction())
        self.add_stage(loading)

        # Preprocessing of the raw data.
        preprocessing = workflow.Stage('preprocess', 'Preprocessing')
        preprocessing.add_function(DatasetLog())
        preprocessing.add_function(workflow.Function('rma', 'RMA'))
        self.add_stage(preprocessing)

        # Gene Ontology distance computation.
        ontology = workflow.Stage('go', 'Gene Ontology Data')
        ontology.add_function(GODistanceFunction())
        self.add_stage(ontology)

        # Regression modelling.
        modelling = workflow.Stage('regression', 'Regression')
        modelling.add_function(workflow.Function('pls', 'PLS'))
        self.add_stage(modelling)

        # Explorative analysis (PCA etc.).
        exploration = workflow.Stage('explore', 'Explorative analysis')
        exploration.add_function(PCAFunction(self))
        self.add_stage(exploration)

        # Persisting results.
        saving = workflow.Stage('save', 'Save Data')
        saving.add_function(DatasetSaveFunction())
        self.add_stage(saving)
|
2006-04-17 22:29:29 +02:00
|
|
|
|
2006-04-24 16:52:21 +02:00
|
|
|
class LoadAnnotationsFunction(workflow.Function):
    """Loads gene -> GO-term annotations from a text file chosen in a dialog.

    Each line of the file is expected to hold a gene identifier followed by
    one or more GO terms, separated by the two-character sequence ' \t'.
    """

    def __init__(self):
        workflow.Function.__init__(self, 'load-go-ann', 'Load Annotations')
        # Populated by load_file(); returned (wrapped in a list) from run().
        self.annotations = None

    def load_file(self, filename):
        """Parse the annotation file at *filename* into self.annotations."""
        f = open(filename)
        try:
            self.annotations = Annotations('genes', 'go-terms')
            logger.log('notice', 'Loading annotation file: %s' % filename)
            for line in f.readlines():
                # NOTE(review): splits on the literal two-char string ' \t'
                # (space + tab) -- confirm the annotation file format really
                # uses that separator and not just a tab.
                val = line.split(' \t')
                if len(val) > 1:
                    val = [v.strip() for v in val]
                    # BUG FIX: the original called retval.add_annotations(...)
                    # but 'retval' was never defined (NameError); annotations
                    # are accumulated on self.annotations.
                    self.annotations.add_annotations('genes', val[0],
                                                     'go-terms', set(val[1:]))
        finally:
            # BUG FIX: the original never closed the file handle.
            f.close()

    def on_response(self, dialog, response):
        """File-chooser 'response' handler: load the chosen file on OK."""
        if response == gtk.RESPONSE_OK:
            logger.log('notice', 'Reading file: %s' % dialog.get_filename())
            self.load_file(dialog.get_filename())

    def run(self):
        """Show the file chooser and return the loaded annotations.

        Returns [None] if the dialog is cancelled or loading never happened.
        """
        btns = ('Open', gtk.RESPONSE_OK,
                'Cancel', gtk.RESPONSE_CANCEL)
        dialog = gtk.FileChooserDialog('Open GO Annotation File',
                                       buttons=btns)
        dialog.connect('response', self.on_response)
        dialog.run()
        dialog.destroy()
        return [self.annotations]
|
|
|
|
|
2006-04-24 16:52:21 +02:00
|
|
|
class GODistanceFunction(workflow.Function):
    """Computes a gene-by-gene distance matrix from GO annotation data.

    The distance computation itself is not implemented yet; run() currently
    returns an all-zero matrix of the right shape.
    """

    def __init__(self):
        # NOTE(review): the ident 'go_diatance' is misspelled, but it is kept
        # unchanged because other code may look this function up by ident.
        workflow.Function.__init__(self, 'go_diatance', 'GO Distances')
        self.output = None

    def run(self, data):
        """Return an n-by-n zero matrix for the n genes in *data*.

        Returns None when *data* is not an Annotations instance.
        """
        logger.log('debug', 'datatype: %s' % type(data))
        if not type(data) == Annotations:
            return None

        logger.log('debug', 'dimensions: %s' % data.dimensions)

        genes = data.get_ids('genes')
        n = len(genes)
        # BUG FIX: the original called array((n, n)), which creates a
        # length-2 vector holding the two sizes -- not an n-by-n matrix.
        # Build an actual n-by-n zero matrix instead.
        gene_distances = array([[0.0] * n for _ in range(n)])

        return gene_distances
|
|
|
|
|
2006-04-21 16:58:42 +02:00
|
|
|
|
2006-04-24 16:52:21 +02:00
|
|
|
class TestDataFunction(workflow.Function):
    """Produces a random dataset plus a couple of plots, for testing."""

    def __init__(self):
        workflow.Function.__init__(self, 'test_data', 'Generate Test Data')

    def run(self):
        """Return a random 20x30 dataset together with two test plots."""
        logger.log('notice', 'Injecting foo test data')
        values = randn(20, 30)
        ds = dataset.Dataset(values)
        scatter = plots.ScatterPlot(ds, ds, 'rows', 'rows', '0_1', '0_2')
        return [ds, plots.SinePlot(), scatter]
|
2006-04-22 02:17:22 +02:00
|
|
|
|
2006-04-24 16:52:21 +02:00
|
|
|
class DatasetLog(workflow.Function):
    """Applies the natural logarithm to every element of a dataset."""

    def __init__(self):
        workflow.Function.__init__(self, 'log', 'Log')

    def run(self, data):
        """Return a new dataset holding log(data), named 'log(<name>)'."""
        logger.log('notice', 'Taking the log of dataset %s' % data.get_name())
        logged = log(data.asarray())
        result_name = 'log(%s)' % data.get_name()
        return [dataset.Dataset(logged, name=result_name)]
|
2006-04-22 02:17:22 +02:00
|
|
|
|
2006-04-24 16:52:21 +02:00
|
|
|
class DatasetLoadFunction(workflow.Function):
    """Loader for previously pickled Datasets."""

    def __init__(self):
        workflow.Function.__init__(self, 'load_data', 'Load Pickled Dataset')

    def run(self):
        """Prompt for a .pkl file and return the unpickled dataset in a list.

        Returns None implicitly when the dialog is cancelled.
        """
        chooser = gtk.FileChooserDialog(title="Select cel files...", parent=None,
                                        action=gtk.FILE_CHOOSER_ACTION_OPEN,
                                        buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
                                                 gtk.STOCK_OPEN, gtk.RESPONSE_OK))
        pkl_filter = gtk.FileFilter()
        pkl_filter.set_name("Python pickled data files (*.pkl)")
        pkl_filter.add_pattern("*.[pP][kK][lL]")
        all_filter = gtk.FileFilter()
        all_filter.set_name("All Files (*.*)")
        all_filter.add_pattern("*")
        chooser.add_filter(pkl_filter)
        chooser.add_filter(all_filter)

        try:
            if chooser.run() == gtk.RESPONSE_OK:
                # BUG FIX: pickled data is binary, so open in "rb" mode, and
                # close the handle (the original leaked an unclosed text-mode
                # file object).
                f = open(chooser.get_filename(), "rb")
                try:
                    return [cPickle.load(f)]
                finally:
                    f.close()
        finally:
            chooser.destroy()
|
|
|
|
|
|
|
|
|
2006-04-24 16:52:21 +02:00
|
|
|
class DatasetSaveFunction(workflow.Function):
    """QND way to save data to file for later import to this program."""

    def __init__(self):
        workflow.Function.__init__(self, 'save_data', 'Save Pickled Dataset')

    def run(self, data):
        """Pickle the first dataset in *data* to a user-chosen file.

        BUG FIX: the original signature was run(self), yet the body read an
        undefined name 'data' and raised NameError on every call; 'data' is
        now a parameter, matching the other Function.run(self, data)
        implementations in this module.
        """
        if not data:
            logger.log("notice", "No data to save.")
            return
        else:
            data = data[0]
        chooser = gtk.FileChooserDialog(title="Save pickled data...", parent=None,
                                        action=gtk.FILE_CHOOSER_ACTION_SAVE,
                                        buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
                                                 gtk.STOCK_SAVE, gtk.RESPONSE_OK))
        pkl_filter = gtk.FileFilter()
        pkl_filter.set_name("Python pickled data files (*.pkl)")
        pkl_filter.add_pattern("*.[pP][kK][lL]")
        all_filter = gtk.FileFilter()
        all_filter.set_name("All Files (*.*)")
        all_filter.add_pattern("*")
        chooser.add_filter(pkl_filter)
        chooser.add_filter(all_filter)
        chooser.set_current_name(data.get_name() + ".pkl")

        try:
            if chooser.run() == gtk.RESPONSE_OK:
                # BUG FIX: pickle protocol 2 is a binary format; write in
                # "wb" mode (the original used "w") and close the handle so
                # the data is flushed to disk.
                f = open(chooser.get_filename(), "wb")
                try:
                    cPickle.dump(data, f, protocol=2)
                finally:
                    f.close()
                logger.log("notice", "Saved data to %r." % chooser.get_filename())
        finally:
            chooser.destroy()
|
|
|
|
|
|
|
|
|
2006-04-24 16:52:21 +02:00
|
|
|
class CelFileImportFunction(workflow.Function):
    """Loads AffyMetrix .CEL-files into matrix."""

    def __init__(self):
        workflow.Function.__init__(self, 'cel_import', 'Import Affy')

    def run(self, data):
        # NOTE(review): the 'data' parameter is never read -- it is rebound
        # below on a successful import. Presumably the workflow framework
        # requires this signature; confirm before changing it.
        import rpy
        chooser = gtk.FileChooserDialog(title="Select cel files...", parent=None,
                                        action=gtk.FILE_CHOOSER_ACTION_OPEN,
                                        buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
                                                 gtk.STOCK_OPEN, gtk.RESPONSE_OK))
        # Multiple CEL files may be selected and imported together.
        chooser.set_select_multiple(True)
        cel_filter = gtk.FileFilter()
        cel_filter.set_name("Cel Files (*.cel)")
        cel_filter.add_pattern("*.[cC][eE][lL]")
        all_filter = gtk.FileFilter()
        all_filter.set_name("All Files (*.*)")
        all_filter.add_pattern("*")
        chooser.add_filter(cel_filter)
        chooser.add_filter(all_filter)

        try:
            if chooser.run() == gtk.RESPONSE_OK:
                # Load the R 'affy' package, then read and RMA-normalise the
                # selected CEL files inside the R session.
                rpy.r.library("affy")

                silent_eval = rpy.with_mode(rpy.NO_CONVERSION, rpy.r)
                silent_eval('E <- ReadAffy(filenames=c("%s"))' % '", "'.join(chooser.get_filenames()))
                silent_eval('E <- rma(E)')

                # Expression matrix from the R object.
                m = rpy.r('m <- E@exprs')

                # Row/column names converted to Python vectors.
                vector_eval = rpy.with_mode(rpy.VECTOR_CONVERSION, rpy.r)
                rownames = vector_eval('rownames(m)')
                colnames = vector_eval('colnames(m)')

                # We should be nice and clean up after ourselves
                rpy.r.rm(["E", "m"])

                # NOTE(review): truth-testing 'm' is ambiguous if rpy returned
                # an array here -- verify this check behaves as intended.
                if m:
                    data = dataset.Dataset(m, (('ids', rownames), ('filename', colnames)), name="AffyMatrix Data")
                    plot = plots.LinePlot(data, "Gene profiles")
                    return [data, plot]
                else:
                    logger.log("notice", "No data loaded from importer.")
        finally:
            # Always tear the dialog down, even if the R interop raised.
            chooser.destroy()
|
2006-04-24 16:07:34 +02:00
|
|
|
|
|
|
|
|
2006-04-24 16:52:21 +02:00
|
|
|
class PCAFunction(workflow.Function):
    """Generic PCA function."""

    def __init__(self, wf):
        workflow.Function.__init__(self, 'pca', 'PCA')
        # Owning workflow instance, kept for later use.
        self._workflow = wf

    def run(self, data):
        # Runs PCA on *data* via R's prcomp through rpy, returning the score
        # matrix T, the loading matrix P, and a scatter plot of each.
        import rpy

        # NOTE(review): the first dim name is treated as the variable
        # dimension (dim_2) and the second as the object dimension (dim_1) --
        # confirm this matches the Dataset dim-name ordering convention.
        dim_2, dim_1 = data.get_dim_names()

        # Push the matrix into R and run PCA on its transpose.
        silent_eval = rpy.with_mode(rpy.NO_CONVERSION, rpy.r)
        rpy.with_mode(rpy.NO_CONVERSION, rpy.r.assign)("m", data.asarray())
        silent_eval("t = prcomp(t(m))")

        # Component identifiers "1".."k" for the principal components.
        T_ids = map(str, range(1, rpy.r("dim(t$x)")[1]+1))
        # Scores (t$x) and loadings (t$rotation) wrapped as Datasets.
        T = dataset.Dataset(rpy.r("t$x"), [(dim_1, data.get_identifiers(dim_1)),
                                           ("component", T_ids)], name="T")
        P = dataset.Dataset(rpy.r("t$rotation"), [(dim_2, data.get_identifiers(dim_2)),
                                                  ("component", T_ids)], name="P")

        # cleanup
        rpy.r.rm(["t", "m"])

        # NOTE(review): the plot dimension names 'ids' / 'filename' are
        # hard-coded -- these assume AffyMatrix-style datasets; verify.
        loading_plot = plots.ScatterPlot(P, P, 'ids','component','1','2', "Loadings")
        score_plot = plots.ScatterPlot(T, T,'filename','component','1','2', "Scores")

        return [T, P, loading_plot, score_plot]
|