
Commit

Merge pull request #40 from Galithil/master
Updates to LIMS2DB
Galithil authored Aug 18, 2016
2 parents abcbfe3 + 0438ef8 commit 6d006c1
Showing 6 changed files with 507 additions and 23 deletions.
447 changes: 447 additions & 0 deletions LIMS2DB/classes.py

Large diffs are not rendered by default.
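Since the 447-line diff for LIMS2DB/classes.py is not rendered, the skeleton below is a hypothetical reconstruction of the new ProjectSQL class, inferred only from its call sites elsewhere in this commit (the constructor arguments, the .obj attribute read in diff.py, and the .save() call in scripts/project_summary_upload_LIMS.py); the actual implementation is far larger.

class ProjectSQL(object):
    def __init__(self, session, log, pj_id, host, couch):
        # SQLAlchemy session against the LIMS database (genologics_sql.utils.get_session())
        self.session = session
        self.log = log
        # project luid, e.g. 'P1234'
        self.pj_id = pj_id
        # LIMS host url, taken from get_configuration()['url']
        self.host = host
        # CouchDB server connection
        self.couch = couch
        # the generated project document; diff.py reads new_project.obj
        self.obj = {}

    def save(self):
        # persist self.obj to statusdb; processPSUL calls P.save() after construction
        pass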

23 changes: 15 additions & 8 deletions LIMS2DB/diff.py
@@ -1,11 +1,10 @@

import LIMS2DB.objectsDB.objectsDB as DB

from genologics.config import BASEURI, USERNAME, PASSWORD
from genologics.lims import Lims
from LIMS2DB.utils import setupLog
from genologics_sql.utils import *

def diff_project_objects(pj_id, couch, logfile):
def diff_project_objects(pj_id, couch, logfile, new=True):

proj_db = couch['projects']
samp_db = couch['samples']
@@ -26,11 +25,18 @@ def diff_project_objects(pj_id, couch, logfile):
old_project.pop('modification_time', None)
old_project.pop('creation_time', None)

new_project = DB.ProjectDB(lims, pj_id, samp_db, log)
if new:
from LIMS2DB.classes import ProjectSQL
session=get_session()
host=get_configuration()['url']
new_project=ProjectSQL(session, log, pj_id, host, couch)
else:
import LIMS2DB.objectsDB.objectsDB as DB
new_project = DB.ProjectDB(lims, pj_id, samp_db, log)

fediff=diff_objects(old_project, new_project.obj)

return {pj_id : fediff}
return (fediff, old_project, new_project.obj)



@@ -51,11 +57,12 @@ def diff_objects(o1, o2, parent=''):
diffs["{} {}".format(parent, key)]=[o1[key], o2[key]]

else:
diffs["key {}".format(key)]=[o1[key], "missing"]
if o1[key]:
diffs["key {} {}".format(parent, key)]=[o1[key], "missing"]

for key in o2:
if key not in o1:
diffs["key {}".format(key)]=["missing", o2[key]]
if key not in o1 and o2[key]:
diffs["key {} {}".format(parent, key)]=["missing", o2[key]]


return diffs
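Note that diff_project_objects now returns a tuple instead of a one-entry dict. A minimal sketch of how a caller consumes the new return value (pj_id, couch and logfile are assumed to be set up as before):

fediff, old_doc, new_doc = diff_project_objects(pj_id, couch, logfile, new=True)
for key, (old_val, new_val) in fediff.items():
    # each diff entry is a two-element list: [value in the old document, value in the new one]
    print("{}: {!r} -> {!r}".format(key, old_val, new_val))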
2 changes: 1 addition & 1 deletion LIMS2DB/objectsDB/objectsDB.py
@@ -938,6 +938,6 @@ def _get_lib_val_info(self, agrlibQCsteps, libvalstart, latest_caliper_id = None
library_validation["average_size_bp"] = average_size_bp
if latest_caliper_id and (Process(self.lims, id=latest_caliper_id['id'])).date_run >= (Process(self.lims, id=libvalstart['id']).date_run):
library_validation["caliper_image"] = get_caliper_img(self.sample_name,
latest_caliper_id['id'], self.lims)
latest_caliper_id['id'], self.lims)
library_validations[agrlibQCstep['id']] = delete_Nones(library_validation)
return delete_Nones(library_validations)
3 changes: 3 additions & 0 deletions LIMS2DB/objectsDB/process_categories.py
@@ -34,6 +34,9 @@
'506': "Pre-Pooling (MiSeq) 4.0",
'508': "Applications Pre-Pooling",
'716': 'Library Pooling (HiSeq X) 1.0'}
PREPSTARTFINLIB = {
'255': "Library Pooling (Finished Libraries) 4.0"
}
PREPSTART = {
'10' : 'Aliquot Libraries for Hybridization (SS XT)',
'47' : 'mRNA Purification, Fragmentation & cDNA synthesis (TruSeq RNA) 4.0',
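A minimal sketch, not part of this commit, of how a consumer of these dictionaries would presumably pick the prep start step for projects with finished libraries, where the pooling step '255' stands in for the regular prep start steps:

from LIMS2DB.objectsDB.process_categories import PREPSTART, PREPSTARTFINLIB

def is_prep_start(process_type_id, finished_library=False):
    # for finished-library projects the pooling step ('255') counts as the prep start
    lookup = PREPSTARTFINLIB if finished_library else PREPSTART
    return str(process_type_id) in lookup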
6 changes: 6 additions & 0 deletions LIMS2DB/parallel.py
@@ -183,6 +183,12 @@ def processWSULSQL(args, queue, logqueue):
final_doc=lutils.merge(ws.obj, doc)
else:
final_doc=ws.obj
#clean possible name duplicates
for row in db.view('worksets/name')[ws.obj['name']]:
doc=db.get(row.id)
if doc['id'] != ws.obj['id']:
proclog.warning("Duplicate name {} for worksets {} and {}".format(doc['name'], doc['id'], final_doc['id']))
db.delete(doc)
db.save(final_doc)
proclog.info("updating {0}".format(ws.obj['name']))
queue.task_done()
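The cleanup above depends on a CouchDB view keyed on workset name. A self-contained sketch of the same pattern with the couchdb-python client; the server URL and database name in the usage comment are placeholders:

import couchdb

def remove_duplicate_worksets(db, name, keep_id, log):
    # delete workset documents sharing the same name but carrying a different LIMS id
    for row in db.view('worksets/name')[name]:
        doc = db.get(row.id)
        if doc['id'] != keep_id:
            log.warning("Duplicate name {} for worksets {} and {}".format(name, doc['id'], keep_id))
            db.delete(doc)

# usage:
# db = couchdb.Server('http://localhost:5984')['worksets']
# remove_duplicate_worksets(db, ws.obj['name'], ws.obj['id'], proclog)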
49 changes: 35 additions & 14 deletions scripts/project_summary_upload_LIMS.py
@@ -10,8 +10,10 @@
from optparse import OptionParser
from LIMS2DB.utils import formatStack
from statusdb.db.utils import *
from genologics_sql.utils import get_session
from genologics_sql.queries import get_last_modified_projectids
from genologics_sql.utils import *
from genologics_sql.tables import Project as DBProject
from LIMS2DB.classes import ProjectSQL

from pprint import pprint

@@ -103,6 +105,8 @@ def main(options):
proj_db = couch['projects']
samp_db = couch['samples']
mainlims = Lims(BASEURI, USERNAME, PASSWORD)
lims_db = get_session()

mainlog = logging.getLogger('psullogger')
mainlog.setLevel(level=logging.INFO)
mfh = logging.handlers.RotatingFileHandler(options.logfile, maxBytes=209715200, backupCount=5)
@@ -111,13 +115,17 @@
mainlog.addHandler(mfh)

if options.project_name:
proj = mainlims.get_projects(name = options.project_name)
if not proj:
mainlog.warn('No project named {man_name} in Lims'.format(
man_name = options.project_name))
else:
if options.old:
proj = mainlims.get_projects(name = options.project_name)
if not proj:
mainlog.warn('No project named {man_name} in Lims'.format(
man_name = options.project_name))
P = PSUL(proj[0], samp_db, proj_db, options.upload, options.project_name, output_f, mainlog)
P.handle_project()
else:
host=get_configuration()['url']
pj_id=lims_db.query(DBProject.luid).filter(DBProject.name == options.project_name).scalar()
P = ProjectSQL(lims_db, mainlog, pj_id, host, couch)
else :
projects=create_projects_list(options, mainlims, mainlog)
masterProcess(options,projects, mainlims, mainlog)
@@ -189,14 +197,26 @@ def processPSUL(options, queue, logqueue):
open(lockfile,'w').close()
except:
proclog.error("cannot create lockfile {}".format(lockfile))
try:
proj=mylims.get_projects(name=projname)[0]
P = PSUL(proj, samp_db, proj_db, options.upload, options.project_name, options.output_f, proclog)
P.handle_project()
except :
error=sys.exc_info()
stack=traceback.extract_tb(error[2])
proclog.error("{0}:{1}\n{2}".format(error[0], error[1], formatStack(stack)))
if options.old:
try:
proj=mylims.get_projects(name=projname)[0]
P = PSUL(proj, samp_db, proj_db, options.upload, options.project_name, options.output_f, proclog)
P.handle_project()
except :
error=sys.exc_info()
stack=traceback.extract_tb(error[2])
proclog.error("{0}:{1}\n{2}".format(error[0], error[1], formatStack(stack)))
else:
try:
db_session=get_session()
pj_id=db_session.query(DBProject.luid).filter(DBProject.name == projname).scalar()
host=get_configuration()['url']
P = ProjectSQL(db_session, proclog, pj_id, host, couch)
P.save()
except :
error=sys.exc_info()
stack=traceback.extract_tb(error[2])
proclog.error("{0}:{1}\n{2}".format(error[0], error[1], formatStack(stack)))

try:
os.remove(lockfile)
@@ -335,6 +355,7 @@ def emit(self, record):
parser.add_option("-j", "--hours", dest = "hours",type='int', help = ("only handle projects modified in the last X hours"), default=None)
parser.add_option("-k", "--control", dest = "control", action="store_true", help = ("only perform a dry-run"), default=False)
parser.add_option("-i", "--input", dest = "input", help = ("path to the input file containing projects to update"), default=None)
parser.add_option("--old", dest = "old", help = ("use the old version of psul, via the API"), action="store_true", default=False)

(options, args) = parser.parse_args()
main(options)
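For reference, a condensed sketch of the new default (SQL-backed) path that runs when --old is not given, assembled from the call sites above; couch and log are assumed to be an existing CouchDB server connection and logger:

from genologics_sql.utils import get_session, get_configuration
from genologics_sql.tables import Project as DBProject
from LIMS2DB.classes import ProjectSQL

def upload_project_sql(project_name, couch, log):
    session = get_session()
    host = get_configuration()['url']
    # resolve the project luid (e.g. 'P1234') from its name via the LIMS database
    pj_id = session.query(DBProject.luid).filter(DBProject.name == project_name).scalar()
    # build the project document from the LIMS SQL backend and push it to statusdb
    ProjectSQL(session, log, pj_id, host, couch).save()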
