source: WAeUP_SRP/trunk/WAeUPTables.py @ 17935

Last change on this file since 17935 was 5614, checked in by Henrik Bettermann, 14 years ago

Implement first part of new accommodation allocation module. This part contains the new student accommodation catalog which replaces the accommodation objects.

1#-*- mode: python; mode: fold -*-
2# (C) Copyright 2005 AixtraWare <http://aixtraware.de>
3# Author: Joachim Schmitz <js@aixtraware.de>
4#
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License version 2 as published
7# by the Free Software Foundation.
8#
9# This program is distributed in the hope that it will be useful,
10# but WITHOUT ANY WARRANTY; without even the implied warranty of
11# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12# GNU General Public License for more details.
13#
14# You should have received a copy of the GNU General Public License
15# along with this program; if not, write to the Free Software
16# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
17# 02111-1307, USA.
18#
19# $Id: WAeUPTables.py 5614 2010-12-27 07:25:11Z henrik $
20
21from zope.interface import implements
22from Globals import InitializeClass
23from Products.ZCatalog.ZCatalog import ZCatalog
24from Products.ZCatalog.ProgressHandler import ZLogHandler
25from AccessControl import ClassSecurityInfo
26from Products.CMFCore.permissions import ModifyPortalContent
27from Products.CMFCore.utils import getToolByName
28from Products.CMFCore.CatalogTool import CatalogTool
29from Products.CPSSchemas.StorageAdapter import MappingStorageAdapter
30from Products.CPSSchemas.DataStructure import DataStructure
31from Products.CPSSchemas.DataModel import DataModel
32from Products.AdvancedQuery import Eq, Between, Le,In
33import urllib
34import DateTime,time
35import csv,re,os
36import logging
37import Globals
38p_home = Globals.package_home(globals())
39i_home = Globals.INSTANCE_HOME
40
41ADDING_SHEDULED = "adding_sheduled"
42OBJECT_CREATED = "object_created"
43NOT_OCCUPIED = 'not_occupied'
44
45from interfaces import IWAeUPTable
46
47class AttributeHolder(object):
48    pass
49
50def dict2ob(d):
51    ob = AttributeHolder()
52    for key, value in d.items():
53        setattr(ob, key, value)
54    return ob
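# Usage sketch for dict2ob (illustrative values, not taken from a real
# catalog): it wraps a plain mapping in an object so that
# ZCatalog.catalog_object() can read the record fields via getattr.
#   ob = dict2ob({'bed': 'hall1_A_101', 'student': NOT_OCCUPIED})
#   ob.bed      -> 'hall1_A_101'
#   ob.student  -> 'not_occupied'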
55
56class WAeUPTable(ZCatalog): ###(
57
58    implements(IWAeUPTable)
59    security = ClassSecurityInfo()
60    meta_type = None
61
62    def __init__(self,name=None):
63        if name ==  None:
64            name = self.name
65        ZCatalog.__init__(self,name)
66
67    def refreshCatalog(self, clear=0, pghandler=None): ###(
68        """ don't refresh for a normal table """
69
70        if self.REQUEST and self.REQUEST.RESPONSE:
71            self.REQUEST.RESPONSE.redirect(
72              self.REQUEST.get('URL1') +
73              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20refresh%20not%20implemented')
74
75###)
76
77    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None): ###(
78        """ clears the whole enchilada """
79
80        #if REQUEST and RESPONSE:
81        #    RESPONSE.redirect(
82        #      URL1 +
83        #      '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Clearing%20disabled')
84
85        self._catalog.clear()
86        if REQUEST and RESPONSE:
87            RESPONSE.redirect(
88              URL1 +
89              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20cleared')
90
91###)
92
93    def record2dict(self,fields,record,index): ###(
94        d = {}
95        for key in fields:
96            v = getattr(record, key, None)
97            v_dump = v
98            if key == 'sex':
99                if v == True:
100                    v_dump = 'F'
101                elif v == False:
102                    v_dump = 'M'
103                d[key] = v_dump
104            elif v:
105                if index == 'translate':
106                    if key == 'lga':
107                        v_dump = self.portal_vocabularies.local_gov_areas.get(v)
108                        if not v_dump:
109                            v_dump = v
110                    elif key == 'aos':
111                        v_dump = self.portal_vocabularies.aos.get(v)
112                d[key] = v_dump
113            else:
114                d[key] = ''
115        return d
116
117###)
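    # Note on record2dict: the boolean 'sex' field is dumped as 'F'/'M'; when
    # the caller passes index == 'translate', 'lga' and 'aos' codes are looked
    # up in portal_vocabularies (falling back to the raw code if the lookup
    # yields nothing). Other empty values are written as ''.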
118
119    def addRecord(self, **data): ###(
120        # The uid is the value of the table's key field (e.g. "bed" for the accommodation table).
121        uid = data[self.key]
122        res = self.searchResults({"%s" % self.key : uid})
123        if len(res) > 0:
124            raise ValueError("A record with uid %s already exists" % uid)
125        self.catalog_object(dict2ob(data), uid=uid)
126        return uid
127
128###)
129
130    def deleteRecord(self, uid):
131        self.uncatalog_object(uid)
132
133    def getRecordByKey(self,key):
134        if not key:
135            return None
136        res = self.evalAdvancedQuery(Eq(self.key,key))
137        if res:
138            return res[0]
139        return None
140
141    def searchAndSetRecord(self, **data):
142        raise NotImplementedError
143
144    def modifyRecord(self, record=None, **data): ###(
145        #records = self.searchResults(uid=uid)
146        #import pdb;pdb.set_trace()
147        uid = data[self.key]
148        if record is None:
149            records = self.searchResults({"%s" % self.key : uid})
150            if len(records) > 1:
151                # Can not happen, but anyway...
152                raise ValueError("More than one record with uid %s" % uid)
153            if len(records) == 0:
154                raise KeyError("No record for uid %s" % uid)
155            record = records[0]
156        record_data = {}
157        for field in self.schema() + self.indexes():
158            record_data[field] = getattr(record, field)
159        # Add the updated data:
160        record_data.update(data)
161        self.catalog_object(dict2ob(record_data), uid)
162
163###)
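    # Usage sketch for modifyRecord: callers pass the key field plus the
    # fields to change, e.g. the accommodation table below does
    #   self.modifyRecord(bed=rec.bed, student=student_id)
    # The existing record is read, updated with the new values and
    # re-cataloged under the same uid.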
164
165    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
166        if isinstance(name, str):
167            name =  (name,)
168        paths = self._catalog.uids.items()
169        i = 0
170        #import pdb;pdb.set_trace()
171        for p,rid in paths:
172            i += 1
173            metadata = self.getMetadataForRID(rid)
174            record_data = {}
175            for field in name:
176                record_data[field] = metadata.get(field)
177            uid = metadata.get(self.key)
178            self.catalog_object(dict2ob(record_data), uid, idxs=name,
179                                update_metadata=0)
180
181###)
182
183    security.declareProtected(ModifyPortalContent,"exportAllRecords") ###(
184    def exportAllRecords(self):
185        "export a WAeUPTable"
186        #import pdb;pdb.set_trace()
187        fields = [field for field in self.schema()]
188        format = ','.join(['"%%(%s)s"' % fn for fn in fields])
189        csv = []
190        csv.append(','.join(['"%s"' % fn for fn in fields]))
191        for uid in self._catalog.uids:
192            records = self.searchResults({"%s" % self.key : uid})
193            if len(records) > 1:
194                # Can not happen, but anyway...
195                raise ValueError("More than one record with uid %s" % uid)
196            if len(records) == 0:
197                raise KeyError("No record for uid %s" % uid)
198            rec = records[0]
199            csv.append(format % rec)
200        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
201        open("%s/import/%s-%s.csv" % (i_home,self.getId(),current),"w+").write('\n'.join(csv))
202
203###)
204
205    security.declareProtected(ModifyPortalContent,"dumpAll")###(
206    def dumpAll(self,index=None,value=None):
207        """dump all data in the table to a csv"""
208        member = self.portal_membership.getAuthenticatedMember()
209        logger = logging.getLogger('WAeUPTables.WAeUPTable.dumpAll')
210        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
211        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
212        res_list = []
213        lines = []
214        if hasattr(self,"export_keys"):
215            fields = self.export_keys
216        else:
217            fields = []
218            for f in self.schema():
219                fields.append(f)
220        headline = ','.join(fields)
221        out = open(export_file,"wb")
222        out.write(headline +'\n')
223        out.close()
224        out = open(export_file,"a")
225        csv_writer = csv.DictWriter(out,fields,)
226        if index is not None and value is not None:
227            records = self.evalAdvancedQuery(Eq(index,value))
228        else:
229            records = self()
230        nr2export = len(records)
231        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
232        chunk = 2000
233        total = 0
234        start = DateTime.DateTime().timeTime()
235        start_chunk = DateTime.DateTime().timeTime()
236        for record in records:
237            not_all = False
238            d = self.record2dict(fields,record,index)
239            lines.append(d)
240            total += 1
241            if total and not total % chunk or total == len(records):
242                csv_writer.writerows(lines)
243                anz = len(lines)
244                logger.info("wrote %(anz)d  total written %(total)d" % vars())
245                end_chunk = DateTime.DateTime().timeTime()
246                duration = end_chunk-start_chunk
247                per_record = duration/anz
248                till_now = end_chunk - start
249                average_per_record = till_now/total
250                estimated_end = DateTime.DateTime(start + average_per_record * nr2export)
251                estimated_end = estimated_end.strftime("%H:%M:%S")
252                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
253                start_chunk = DateTime.DateTime().timeTime()
254                lines = []
255        end = DateTime.DateTime().timeTime()
256        logger.info('total time %6.2f m' % ((end-start)/60))
257        import os
258        filename, extension = os.path.splitext(export_file)
259        from subprocess import call
260        msg = "wrote %(total)d records to %(export_file)s" % vars()
261        #try:
262        #    retcode = call('gzip %s' % (export_file),shell=True)
263        #    if retcode == 0:
264        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
265        #except OSError, e:
266        #    retcode = -99
267        #    logger.info("zip failed with %s" % e)
268        logger.info(msg)
269        args = {'portal_status_message': msg}
270        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
271        url = self.REQUEST.get('URL2')
272        return 'ready'
273        #return self.REQUEST.RESPONSE.redirect(url)
274    ###)
275
276
277    security.declarePrivate("_import_new") ###(
278    def _import_new(self,csv_items,schema,layout,mode,logger):
279        "import data from a csv.DictReader instance"
280        start = True
281        tr_count = 1
282        total_imported = 0
283        total_not_imported = 0
284        total = 0
285        #iname =  "%s" % filename    # 'filename' is not passed to this method
286        not_imported = []
287        valid_records = []
288        invalid_records = []
289        duplicate_records = []
290        d = {}
291        d['mode'] = mode
292        d['valid_records'] = valid_records
293        d['invalid_records'] = invalid_records
294        d['duplicate_records'] = duplicate_records
295        # d['import_fn'] = import_fn
296        # d['imported_fn'] = imported_fn
297        # d['not_imported_fn'] = not_imported_fn
298        validators = {}
299        for widget in layout.keys():
300            try:
301                validators[widget] = layout[widget].validate
302            except AttributeError:
303                logger.info('%s has no validate attribute' % widget)
304                return d
305        for item in csv_items:
306            if start:
307                start = False
308                logger.info('%s starts import' % self.portal_membership.getAuthenticatedMember())
309                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
310                attrs = csv_items.fieldnames
311                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
312                diff2schema = set(import_keys).difference(set(schema.keys()))
313                diff2layout = set(import_keys).difference(set(layout.keys()))
314                if diff2layout:
315                    em = "not ignorable key(s) %s found in heading" % diff2layout
316                    logger.info(em)
317                    return d
318                # s = ','.join(['"%s"' % fn for fn in import_keys])
319                # open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
320                # #s = '"id",' + s
321                # open(imported_fn,"a").write(s + '\n')
322                # format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
323                # format_error = format + ',"%(Error)s"'
324                # #format = '"%(id)s",'+ format
325                adapters = [MappingStorageAdapter(schema, item)]
326            dm = DataModel(item, adapters,context=self)
327            ds = DataStructure(data=item,datamodel=dm)
328            error_string = ""
329            for k in import_keys:
330                if not validators[k](ds,mode=mode):
331                    error_string += " %s : %s" % (k,ds.getError(k))
332            if error_string:
333                item['Error'] = error_string
334                #invalid_records.append(dm)
335                invalid_records.append(item)
336                total_not_imported += 1
337            else:
338                #em = format % item    # 'format' is only defined in the commented-out block above
339                valid_records.append(dm)
340                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
341                tr_count += 1
342                total_imported += 1
343            total += 1
344        # if len(imported) > 0:
345        #     open(imported_fn,"a").write('\n'.join(imported))
346        # if len(not_imported) > 0:
347        #     open(not_imported_fn,"a").write('\n'.join(not_imported))
348        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
349        d['imported'] = total_imported
350        d['not_imported'] = total_not_imported
351        d['valid_records'] = valid_records
352        d['invalid_records'] = invalid_records
353        return d
354    ###)
355
356    security.declarePublic("missingValue")###(
357    def missingValue(self):
358        from Missing import MV
359        return MV
360    ###)
361###)
362
363class AccommodationTable(WAeUPTable): ###(
364
365    meta_type = 'WAeUP Accommodation Tool'
366    name = "portal_accommodation"
367    key = "bed"
368    not_occupied = NOT_OCCUPIED
369    def __init__(self,name=None):
370        if name ==  None:
371            name = self.name
372        WAeUPTable.__init__(self, name)
373
374    def searchAndReserveBed(self, student_id,bed_type,random_order=False): ###(
375        logger = logging.getLogger('WAeUPTables.AccommodationTable.searchAndReserveBed')
376        records = self.evalAdvancedQuery(Eq('student',student_id))
377        if len(records) == 1:
378            #return -1,"Student with Id %s already booked bed %s." % (student_id,records[0].bed)
379            logger.info('%s found (reserved) bed %s' % (student_id,records[0].bed))
380            return -1,records[0].bed
381        elif len(records) > 1:
382            logger.info('%s found more than one (reserved) bed' % (student_id))
383            return -3,'more than one bed'
384        query = Eq('bed_type',bed_type) & Eq('student',NOT_OCCUPIED)
385        records = self.evalAdvancedQuery(query,sortSpecs=('sort_id','bed'))
386        if len(records) == 0:
387            logger.info('no bed %s available for %s' % (bed_type,student_id))
388            return -2,"no bed"
389        if random_order:
390            import random
391            bed_no = random.randint(0,len(records)-1)
392        else:
393            bed_no = 0
394        rec = records[bed_no]
395        self.modifyRecord(bed=rec.bed,student=student_id)
396        logger.info('%s booked bed %s' % (student_id,rec.bed))
397        return 1,rec.bed
398    ###)
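    # Return codes of searchAndReserveBed (summarizing the branches above):
    #    1, bed  -- a free bed of the requested type was booked
    #   -1, bed  -- the student already holds a (reserved) bed
    #   -2, msg  -- no free bed of the requested type is available
    #   -3, msg  -- more than one bed is already recorded for the student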
399
400
401InitializeClass(AccommodationTable)
402
403###)
404
405class PinTable(WAeUPTable): ###(
406    from ZODB.POSException import ConflictError
407    security = ClassSecurityInfo()
408    meta_type = 'WAeUP Pin Tool'
409    name = "portal_pins"
410    key = 'pin'
411
412    def __init__(self,name=None):
413        if name ==  None:
414            name = self.name
415        WAeUPTable.__init__(self, name)
416
417    security.declareProtected(ModifyPortalContent,"dumpAll")###(
418    def dumpAll(self,include_unused=None,index=None):
419        """dump all data in the table to a csv"""
420        member = self.portal_membership.getAuthenticatedMember()
421        logger = logging.getLogger('WAeUPTables.PinTable.dumpAll')
422        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
423        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
424        res_list = []
425        lines = []
426        if hasattr(self,"export_keys"):
427            fields = self.export_keys
428        else:
429            fields = []
430            for f in self.schema():
431                fields.append(f)
432        headline = ','.join(fields)
433        out = open(export_file,"wb")
434        out.write(headline +'\n')
435        out.close()
436        out = open(export_file,"a")
437        csv_writer = csv.DictWriter(out,fields,)
438        if include_unused is not None and str(member) not in ('admin',):
439            logger.info('%s tries to dump pintable with unused pins' % (member))
440            return
441        if include_unused is not None:
442            records = self()
443        else:
444            records = self.evalAdvancedQuery(~Eq('student',''))
445        nr2export = len(records)
446        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
447        chunk = 2000
448        total = 0
449        start = DateTime.DateTime().timeTime()
450        start_chunk = DateTime.DateTime().timeTime()
451        for record in records:
452            not_all = False
453            d = self.record2dict(fields,record,index)
454            lines.append(d)
455            total += 1
456            if total and not total % chunk or total == len(records):
457                csv_writer.writerows(lines)
458                anz = len(lines)
459                logger.info("wrote %(anz)d  total written %(total)d" % vars())
460                end_chunk = DateTime.DateTime().timeTime()
461                duration = end_chunk-start_chunk
462                per_record = duration/anz
463                till_now = end_chunk - start
464                average_per_record = till_now/total
465                estimated_end = DateTime.DateTime(start + average_per_record * nr2export)
466                estimated_end = estimated_end.strftime("%H:%M:%S")
467                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
468                start_chunk = DateTime.DateTime().timeTime()
469                lines = []
470        end = DateTime.DateTime().timeTime()
471        logger.info('total time %6.2f m' % ((end-start)/60))
472        import os
473        filename, extension = os.path.splitext(export_file)
474        from subprocess import call
475        msg = "wrote %(total)d records to %(export_file)s" % vars()
476        #try:
477        #    retcode = call('gzip %s' % (export_file),shell=True)
478        #    if retcode == 0:
479        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
480        #except OSError, e:
481        #    retcode = -99
482        #    logger.info("zip failed with %s" % e)
483        logger.info(msg)
484        args = {'portal_status_message': msg}
485        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
486        url = self.REQUEST.get('URL2')
487        return self.REQUEST.RESPONSE.redirect(url)
488    ###)
489
490
491
492    def searchAndSetRecord(self, uid, student_id,prefix):
493
494        # The following line must be activated after resetting the
495        # the portal_pins table. This is to avoid duplicate entries
496        # and disable duplicate payments.
497
498        #student_id = student_id.upper()
499
500        #records = self.searchResults(student = student_id)
501        #if len(records) > 0 and prefix in ('CLR','APP'):
502        #    for r in records:
503        #        if r.pin != uid and r.prefix_batch.startswith(prefix):
504        #            return -2
505        records = self.searchResults({"%s" % self.key : uid})
506        if len(records) > 1:
507            # Can not happen, but anyway...
508            raise ValueError("More than one record with uid %s" % uid)
509        if len(records) == 0:
510            return -1,None
511        record = records[0]
512        if record.student == "":
513            record_data = {}
514            for field in self.schema() + self.indexes():
515                record_data[field] = getattr(record, field)
516            # Add the updated data:
517            record_data['student'] = student_id
518            try:
519                self.catalog_object(dict2ob(record_data), uid)
520                return 1,record
521            except self.ConflictError:
522                return 2,record
523        if record.student.upper() != student_id.upper():
524            return 0,record
525        if record.student.upper() == student_id.upper():
526            return 2,record
527        return -3,record
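    # Return codes of searchAndSetRecord:
    #   -1, None   -- no pin record exists for the given uid
    #    1, record -- the pin was unused and is now tied to student_id
    #    2, record -- the pin is already tied to this student (or a write
    #                 conflict occurred while tying it)
    #    0, record -- the pin is already used by a different student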
528InitializeClass(PinTable)
529###)
530
531class PumeResultsTable(WAeUPTable): ###(
532
533    meta_type = 'WAeUP PumeResults Tool'
534    name = "portal_pumeresults"
535    key = "jamb_reg_no"
536    def __init__(self,name=None):
537        if name ==  None:
538            name = self.name
539        WAeUPTable.__init__(self, name)
540
541
542InitializeClass(PumeResultsTable)
543
544###)
545
546class ApplicantsCatalog(WAeUPTable): ###(
547
548    meta_type = 'WAeUP Applicants Catalog'
549    name = "applicants_catalog"
550    key = "reg_no"
551    security = ClassSecurityInfo()
552    #export_keys = (
553    #               "reg_no",
554    #               "status",
555    #               "lastname",
556    #               "sex",
557    #               "date_of_birth",
558    #               "lga",
559    #               "email",
560    #               "phone",
561    #               "passport",
562    #               "entry_mode",
563    #               "pin",
564    #               "screening_type",
565    #               "registration_date",
566    #               "testdate",
567    #               "application_date",
568    #               "screening_date",
569    #               "faculty",
570    #               "department",
571    #               "course1",
572    #               "course2",
573    #               "course3",
574    #               "eng_score",
575    #               "subj1",
576    #               "subj1score",
577    #               "subj2",
578    #               "subj2score",
579    #               "subj3",
580    #               "subj3score",
581    #               "aggregate",
582    #               "course_admitted",
583    #               )
584
585    def __init__(self,name=None):
586        if name ==  None:
587            name = self.name
588        WAeUPTable.__init__(self, name)
589
590    security.declareProtected(ModifyPortalContent,"new_importCSV")###(
591    def new_importCSV(self,filename="JAMB_data",
592                  schema_id="application",
593                  layout_id="import_application",
594                  mode='add'):
595        """ import JAMB data """
596        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
597        pm = self.portal_membership
598        member = pm.getAuthenticatedMember()
599        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
600        lock_fn = "%s/import/%s_import_lock" % (i_home,filename)
601        import_fn = "%s/import/%s.csv" % (i_home,filename)
602        if mode not in ('add','edit'):
603            logger.info("invalid mode: %s" % mode)
604        if os.path.exists(lock_fn):
605            logger.info("import of %(import_fn)s already in progress" % vars())
606            return
607        lock_file = open(lock_fn,"w")
608        lock_file.write("%(current)s \n" % vars())
609        lock_file.close()
610        invalid_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
611        duplicate_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
612        stool = getToolByName(self, 'portal_schemas')
613        ltool = getToolByName(self, 'portal_layouts')
614        schema = stool._getOb(schema_id)
615        if schema is None:
616            em = 'No such schema %s' % schema_id
617            logger.error(em)
618            return
619        for postfix in ('_import',''):
620            layout_name = "%(layout_id)s%(postfix)s" % vars()
621            if hasattr(ltool,layout_name):
622                break
623        layout = ltool._getOb(layout_name)
624        if layout is None:
625            em = 'No such layout %s' % layout_id
626            logger.error(em)
627            return
628        try:
629            csv_file = csv.DictReader(open(import_fn,"rb"))
630        except:
631            em = 'Error reading %s.csv' % filename
632            logger.error(em)
633            return
634        d = self._import_new(csv_file,schema,layout,mode,logger)
635        imported = []
636        edited = []
637        duplicates = []
638        not_found = []
639        if len(d['valid_records']) > 0:
640            for record in d['valid_records']:
641                #import pdb;pdb.set_trace()
642                if mode == "add":
643                    try:
644                        self.addRecord(**dict(record.items()))
645                        imported.append(dict(record.items()))
646                        logger.info("added %s" % record.items())
647                    except ValueError:
648                        duplicates.append(dict(record.items()))
649                        logger.info("duplicate %s" % record.items())
650                elif mode == "edit":
651                    try:
652                        self.modifyRecord(**dict(record.items()))
653                        edited.append(dict(record.items()))
654                        logger.info("edited %s" % record.items())
655                    except KeyError:
656                        not_found.append(dict(record.items()))
657                        logger.info("not found %s" % record.items())
658        invalid = d['invalid_records']
659        for itype in ("imported","edited","not_found","duplicates","invalid"):
660            outlist = locals()[itype]
661            if len(outlist):
662                d = {}
663                for k in outlist[0].keys():
664                    d[k] = k
665                outlist.insert(0,d)
666                outfile = open("file_name_%s" % itype,'w')
667                csv.DictWriter(outfile,outlist[0].keys()).writerows(outlist)
668                logger.info("wrote %d %s records" % (len(outlist)-1,itype))
669###)
670
671    security.declareProtected(ModifyPortalContent,"importCSV")###(
672    def importCSV(self,filename="JAMB_data",
673                  schema_id="application",
674                  layout_id="application_pce",
675                  mode='add'):
676        """ import JAMB data """
677        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
678        stool = getToolByName(self, 'portal_schemas')
679        ltool = getToolByName(self, 'portal_layouts')
680        schema = stool._getOb(schema_id)
681        if schema is None:
682            em = 'No such schema %s' % schema_id
683            logger.error(em)
684            return
685        layout = ltool._getOb(layout_id)
686        if layout is None:
687            em = 'No such layout %s' % layout_id
688            logger.error(em)
689            return
690        d = self._import_old(filename,schema,layout,mode,logger)
691        if len(d['valid_records']) > 0:
692            for record in d['valid_records']:
693                #import pdb;pdb.set_trace()
694                if mode == "add":
695                    self.addRecord(**dict(record.items()))
696                    logger.info("added %s" % record.items())
697                elif mode == "edit":
698                    self.modifyRecord(**dict(record.items()))
699                    logger.info("edited %s" % record.items())
700                else:
701                    logger.info("invalid mode: %s" % mode)
702        logger.info("%(mode)sed %(imported)d records, invalid written to %(not_imported_fn)s" % d)
703    ###)
704
705InitializeClass(ApplicantsCatalog)
706
707###)
708
709class StudentsCatalog(WAeUPTable): ###(
710    security = ClassSecurityInfo()
711
712    meta_type = 'WAeUP Students Catalog'
713    name = "students_catalog"
714    key = "id"
715    affected_types = {   ###(
716                      'StudentApplication':
717                      {'id': 'application',
718                       'fields':
719                       ('jamb_reg_no',
720                        'entry_mode',
721                        #'entry_level',
722                        'entry_session',
723                       )
724                      },
725                      'StudentClearance':
726                      {'id': 'clearance',
727                       'fields':
728                       ('matric_no',
729                        'lga',
730                        'date_of_birth',  # birthday
731                       )
732                      },
733                      'StudentPersonal':
734                      {'id': 'personal',
735                       'fields':
736                       ('name',
737                        'sex',
738                        'perm_address',
739                        'email',
740                        'phone',
741                        'marit_stat',
742                        'firstname',
743                        'middlename',
744                        'lastname',
745                       )
746                      },
747                      'StudentStudyCourse':
748                      {'id': 'study_course',
749                       'fields':
750                       ('course', # study_course
751                        'faculty', # from certificate
752                        'department', # from certificate
753                        'end_level', # from certificate
754                        'level', # current_level
755                        'mode',  # from certificate
756                        'session', # current_session
757                        'verdict', # current_verdict
758                       )
759                      },
760                     }
761    ###)
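    # affected_types maps the portal_type of a student sub-object to the id of
    # that sub-object inside the Student folder and to the catalog fields
    # derived from it. reindexIndex(), refreshCatalog() and
    # notify_event_listener() below use this mapping to decide which document
    # to read and which get_from_doc_<field> accessor to call.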
762
763    def __init__(self,name=None):
764        if name ==  None:
765            name = self.name
766        WAeUPTable.__init__(self, name)
767        return
768       
769    security.declareProtected(ModifyPortalContent,"exportMoodleData") ###(
770    def exportMoodleData(self):
771        "export student data for the Moodle import"
772        member = self.portal_membership.getAuthenticatedMember()
773        logger = logging.getLogger('WAeUPTables.StudentsCatalog.exportMoodleData')
774        if str(member) not in ('admin',):
775            logger.info('%s tries to dump Moodle Data' % (member))
776            return       
777        fields = [field for field in self.schema()]
778        export_fields = ['username','firstname','lastname','email','currentsession','regstate','password','city','country','deleted']
779        format = ','.join(['"%%(%s)s"' % fn for fn in export_fields])
780        csv = []
781        csv.append(','.join(['"%s"' % fn for fn in export_fields]))
782        #import pdb;pdb.set_trace()
783        for uid in self._catalog.uids:
784            records = self.searchResults({"%s" % self.key : uid})
785            if len(records) > 1:
786                # Can not happen, but anyway...
787                raise ValueError("More than one record with uid %s" % uid)
788            if len(records) == 0:
789                raise KeyError("No record for uid %s" % uid)
790            rec = self.record2dict(fields,records[0],None)   
791            fn = rec['firstname'].replace(',',' ') 
792            mn = rec['middlename'].replace(',',' ')
793            ln = rec['lastname'].replace(',',' ')
794            rec['firstname'] = fn + ' ' + mn
795            rec['lastname'] = ln
796            rec['regstate'] = rec['review_state']
797            rec['currentsession'] = rec['session']
798            rec['username'] = rec['id']
799            rec['city'] = ''
800            rec['country'] = 'ng'
801            rec['deleted'] = '0'
802            rec['password'] = self.waeup_tool.getCredential(rec['id'])
803            csv.append(format % rec)
804        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
805        open("%s/export/moodle-%s.csv" % (i_home,current),"w+").write('\n'.join(csv))       
806        return 'ready'
807
808    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None):
809        """ clears the whole enchilada """
810        self._catalog.clear()
811
812        if REQUEST and RESPONSE:
813            RESPONSE.redirect(
814              URL1 +
815              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Cleared')
816
817    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
818        """ clear the catalog, then re-index everything """
819
820        elapse = time.time()
821        c_elapse = time.clock()
822
823        pgthreshold = self._getProgressThreshold()
824        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
825        self.refreshCatalog(clear=1, pghandler=handler)
826
827        elapse = time.time() - elapse
828        c_elapse = time.clock() - c_elapse
829
830        RESPONSE.redirect(
831            URL1 +
832            '/manage_catalogAdvanced?manage_tabs_message=' +
833            urllib.quote('Catalog Updated \n'
834                         'Total time: %s\n'
835                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
836    ###)
837
838    def fill_certificates_dict(self): ###(
839        "return certificate data in a dict"
840        certificates_brains = self.portal_catalog(portal_type ='Certificate')
841        d = {}
842        for cb in certificates_brains:
843            certificate_doc = cb.getObject().getContent()
844            cb_path = cb.getPath().split('/')
845            ld = {}
846            ld['faculty'] = cb_path[-4]
847            ld['department'] = cb_path[-3]
848            ld['end_level'] = getattr(certificate_doc,'end_level','999')
849            ld['study_mode'] = getattr(certificate_doc,'study_mode','')
850            d[cb.getId] = ld
851        return d
852    ###)
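    # The certificate cache kept in self._v_certificates maps a certificate id
    # to the values needed by the get_from_doc_* accessors. Illustrative shape
    # (ids and values are made up):
    #   {'BSCCHM': {'faculty': 'science', 'department': 'chemistry',
    #               'end_level': '500', 'study_mode': 'ug_ft'}}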
853
854    def get_from_doc_department(self,doc,cached_data={}): ###(
855        "return the students department"
856        if doc is None:
857            return None
858        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
859            return self._v_certificates[doc.study_course]['department']
860        certificate_res = self.portal_catalog(id = doc.study_course)
861        if len(certificate_res) != 1:
862            return None
863        return certificate_res[0].getPath().split('/')[-3]
864
865    def get_from_doc_faculty(self,doc,cached_data={}):
866        "return the students faculty"
867        if doc is None:
868            return None
869        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
870            return self._v_certificates[doc.study_course]['faculty']
871        certificate_res = self.portal_catalog(id = doc.study_course)
872        if len(certificate_res) != 1:
873            return None
874        return certificate_res[0].getPath().split('/')[-4]
875
876    def get_from_doc_end_level(self,doc,cached_data={}):
877        "return the students end_level"
878        if doc is None:
879            return None
880        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
881            return self._v_certificates[doc.study_course]['end_level']
882        certificate_res = self.portal_catalog(id = doc.study_course)
883        if len(certificate_res) != 1:
884            return None
885        return getattr(certificate_res[0].getObject().getContent(),'end_level','unknown')
886
887    def get_from_doc_level(self,doc,cached_data={}):
888        "return the students level"
889        if doc is None:
890            return None
891        return getattr(doc,'current_level',None)
892
893    #def get_from_doc_mode(self,doc,cached_data={}):
894    #    "return the students mode"
895    #    if doc is None:
896    #        return None
897    #    cm = getattr(doc,'current_mode',None)
898    #    return cm
899   
900    def get_from_doc_mode(self,doc,cached_data={}):
901        "return the students mode"
902        if doc is None:
903            return None
904        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
905            return self._v_certificates[doc.study_course]['study_mode']
906        certificate_res = self.portal_catalog(id = doc.study_course)
907        if len(certificate_res) != 1:
908            return None
909        return getattr(certificate_res[0].getObject().getContent(),'study_mode','unknown')   
910
911
912    def get_from_doc_marit_stat(self,doc,cached_data={}):
913        "return the student's marit_stat"
914        if doc is None:
915            return None
916        ms = getattr(doc,'marit_stat',None)
917        if ms == True:
918            return 'married'
919        elif ms == False:
920            return 'single'
921        else:
922            return 'undefined'
923           
924    def get_from_doc_date_of_birth(self,doc,cached_data={}):
925        "return the student's date of birth"
926        if doc is None:
927            return None
928        return getattr(doc,'birthday',None)           
929
930    def get_from_doc_session(self,doc,cached_data={}):
931        "return the student's current_session"
932        if doc is None:
933            return None
934        return getattr(doc,'current_session',None)
935
936    def get_from_doc_entry_session(self,doc,cached_data={}):
937        "return the student's entry_session"
938        if doc is None:
939            return None
940        es = getattr(doc,'entry_session',None)
941        if es is not None and len(es) < 3:
942            return es
943        elif es is not None and len(es) == 9:
944            return es[2:4]   
945        try:
946            digit = int(doc.jamb_reg_no[0])
947        except:
948            return "-1"
949        if digit < 9:
950            return "0%c" % doc.jamb_reg_no[0]
951        return "9%c" % doc.jamb_reg_no[0]
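    # Fallback heuristic above: when no usable entry_session is stored, the
    # first digit of the JAMB registration number is read as the admission
    # year, e.g. a reg_no starting with '6' yields '06', one starting with '9'
    # yields '99', and unparsable values yield '-1'.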
952
953    def get_from_doc_course(self,doc,cached_data={}):
954        "return the student's study_course"
955        if doc is None:
956            return None
957        return getattr(doc,'study_course',None)
958
959    def get_from_doc_name(self,doc,cached_data={}):
960        "return the student's name from the personal data"
961        if doc is None:
962            return None
963        return "%s %s %s" % (doc.firstname,doc.middlename,doc.lastname)
964
965    def get_from_doc_verdict(self,doc,cached_data={}):
966        "return the student's current_verdict"
967        if doc is None:
968            return None
969        return getattr(doc,'current_verdict',None)
970    ###)
971
972    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
973        if not hasattr(self,'_v_certificates'):
974            self._v_certificates = self.fill_certificates_dict()
975        if isinstance(name, str):
976            name = (name,)
977        reindextypes = {}
978        reindex_special = []
979        for n in name:
980            if n in ("review_state",):
981                reindex_special.append(n)
982            else:
983                for pt in self.affected_types.keys():
984                    if n in self.affected_types[pt]['fields']:
985                        if reindextypes.has_key(pt):
986                            reindextypes[pt].append(n)
987                        else:
988                            reindextypes[pt]= [n]
989                        break
990        #cached_data = {}
991        #if set(name).intersection(set(('faculty','department','end_level','mode'))):
992        #    cached_data = self.fill_certificates_dict()
993        students = self.portal_catalog(portal_type="Student")
994        if hasattr(self,'portal_catalog_real'):
995            aq_portal = self.portal_catalog_real.evalAdvancedQuery
996        else:
997            aq_portal = self.portal_catalog.evalAdvancedQuery
998        num_objects = len(students)
999        if pghandler:
1000            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1001        noattr = set(('StudentClearance','StudentPersonal')) & set(reindextypes.keys())
1002        #import pdb;pdb.set_trace()
1003        for i in xrange(num_objects):
1004            if pghandler: pghandler.report(i)
1005            student_brain = students[i]
1006            student_object = student_brain.getObject()
1007            data = {}
1008            modified = False
1009            sid = data['id'] = student_brain.getId
1010            if reindex_special and 'review_state' in reindex_special:
1011                modified = True
1012                data['review_state'] = student_object.portal_workflow.getInfoFor(student_object,'review_state',None)
1013            sub_objects = False
1014            for pt in reindextypes.keys():
1015                modified = True
1016                try:
1017                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1018                    sub_objects = True
1019                except:
1020                    continue
1021                for field in set(name).intersection(self.affected_types[pt]['fields']):
1022                    if hasattr(self,'get_from_doc_%s' % field):
1023                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc)
1024                    else:
1025                        data[field] = getattr(doc,field)
1026            if not sub_objects and noattr:
1027                import_res = self.returning_import(id = sid)
1028                if not import_res:
1029                    continue
1030                import_record = import_res[0]
1031                data['matric_no'] = import_record.matric_no
1032                data['sex'] = import_record.Sex == 'F'
1033                data['name'] = "%s %s %s" % (import_record.Firstname,
1034                                             import_record.Middlename,
1035                                             import_record.Lastname)
1036                data['jamb_reg_no'] = import_record.Entryregno
1037            if modified:
1038                self.modifyRecord(**data)
1039        if pghandler: pghandler.finish()
1040    ###)
1041
1042    def refreshCatalog(self, clear=0, pghandler=None): ###(
1043        """ re-index everything we can find """
1044        students_folder = self.portal_url.getPortalObject().campus.students
1045        if clear:
1046            self._catalog.clear()
1047        students = self.portal_catalog(portal_type="Student")
1048        num_objects = len(students)
1049        #cached_data = self.fill_certificates_dict()
1050        if not hasattr(self,'_v_certificates'):
1051            self._v_certificates = self.fill_certificates_dict()
1052        if pghandler:
1053            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1054        for i in xrange(num_objects):
1055            if pghandler: pghandler.report(i)
1056            student_brain = students[i]
1057            spath = student_brain.getPath()
1058            student_object = student_brain.getObject()
1059            data = {}
1060            sid = data['id'] = student_brain.getId
1061            #data['review_state'] = student_brain.review_state
1062            data['review_state'] = student_object.portal_workflow.getInfoFor(student_object,'review_state',None)
1063            sub_objects = False
1064            for pt in self.affected_types.keys():
1065                modified = True
1066                try:
1067                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1068                    sub_objects = True
1069                except:
1070                    #from pdb import set_trace;set_trace()
1071                    continue
1072                for field in self.affected_types[pt]['fields']:
1073                    if hasattr(self,'get_from_doc_%s' % field):
1074                        getter = getattr(self,'get_from_doc_%s' % field)
1075                        data[field] = getter(doc)
1076                    else:
1077                        data[field] = getattr(doc,field,None)
1078            if not sub_objects:
1079                import_res = self.returning_import(id = sid)
1080                if not import_res:
1081                    continue
1082                import_record = import_res[0]
1083                data['matric_no'] = import_record.matric_no
1084                data['sex'] = import_record.Sex == 'F'
1085                data['name'] = "%s %s %s" % (import_record.Firstname,
1086                                             import_record.Middlename,
1087                                             import_record.Lastname)
1088                data['jamb_reg_no'] = import_record.Entryregno
1089            self.addRecord(**data)
1090        if pghandler: pghandler.finish()
1091    ###)
1092
1093    security.declarePrivate('notify_event_listener') ###(
1094    def notify_event_listener(self,event_type,object,infos):
1095        "listen for events"
1096        if not infos.has_key('rpath'):
1097            return
1098        pt = getattr(object,'portal_type',None)
1099        mt = getattr(object,'meta_type',None)
1100        students_catalog = self
1101        data = {}
1102        if pt == 'Student' and\
1103           mt == 'CPS Proxy Folder' and\
1104           event_type.startswith('workflow'):
1105            data['id'] = object.getId()
1106            data['review_state'] = self.portal_workflow.getInfoFor(object,'review_state',None)
1107            students_catalog.modifyRecord(**data)
1108            return
1109        rpl = infos['rpath'].split('/')
1110        if pt == 'Student' and mt == 'CPS Proxy Folder':
1111            student_id = object.id
1112            if event_type == "sys_add_object":
1113                try:
1114                    self.addRecord(id = student_id)
1115                except ValueError:
1116                    pass
1117                return
1118            elif event_type == 'sys_del_object':
1119                self.deleteRecord(student_id)
1120        if pt not in self.affected_types.keys():
1121            return
1122        if event_type not in ('sys_modify_object',):
1123            return
1124        if mt == 'CPS Proxy Folder':
1125            return
1126        if not hasattr(self,'_v_certificates'):
1127            self._v_certificates = self.fill_certificates_dict()
1128        for field in self.affected_types[pt]['fields']:
1129            if hasattr(self,'get_from_doc_%s' % field):
1130                data[field] = getattr(self,'get_from_doc_%s' % field)(object)
1131            else:
1132                data[field] = getattr(object,field)
1133        data['id'] = rpl[2]
1134        self.modifyRecord(**data)
1135    ###)
1136
1137
1138InitializeClass(StudentsCatalog)
1139
1140###)
1141
1142class CertificatesCatalog(WAeUPTable): ###(
1143    security = ClassSecurityInfo()
1144
1145    meta_type = 'WAeUP Certificates Catalog'
1146    name =  "certificates_catalog"
1147    key = "code"
1148    def __init__(self,name=None):
1149        if name ==  None:
1150            name =  self.name
1151        WAeUPTable.__init__(self, name)
1152
1153    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
1154        """ clear the catalog, then re-index everything """
1155
1156        elapse = time.time()
1157        c_elapse = time.clock()
1158
1159        pgthreshold = self._getProgressThreshold()
1160        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
1161        self.refreshCatalog(clear=1, pghandler=handler)
1162
1163        elapse = time.time() - elapse
1164        c_elapse = time.clock() - c_elapse
1165
1166        RESPONSE.redirect(
1167            URL1 +
1168            '/manage_catalogAdvanced?manage_tabs_message=' +
1169            urllib.quote('Catalog Updated \n'
1170                         'Total time: %s\n'
1171                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
1172    ###)
1173
1174    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1175        if isinstance(name, str):
1176            name = (name,)
1177        certificates = self.portal_catalog(portal_type="Certificate")
1178        num_objects = len(certificates)
1179        if pghandler:
1180            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1181        for i in xrange(num_objects):
1182            if pghandler: pghandler.report(i)
1183            certificate_brain = certificates[i]
1184            certificate_object = certificate_brain.getObject()
1185            pl = certificate_brain.getPath().split('/')
1186            data = {}
1187            cid = data[self.key] = certificate_brain.getId
1188            data['faculty'] = pl[-4]
1189            data['department'] = pl[-3]
1190            doc = certificate_object.getContent()
1191            for field in name:
1192                if field not in (self.key,'faculty','department'):
1193                    data[field] = getattr(doc,field)
1194            self.modifyRecord(**data)
1195        if pghandler: pghandler.finish()
1196    ###)
1197
1198    def refreshCatalog(self, clear=0, pghandler=None): ###(
1199        """ re-index everything we can find """
1200        if clear:
1201            self._catalog.clear()
1202        certificates = self.portal_catalog(portal_type="Certificate")
1203        num_objects = len(certificates)
1204        if pghandler:
1205            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1206        #from pdb import set_trace;set_trace()
1207        for i in xrange(num_objects):
1208            if pghandler: pghandler.report(i)
1209            certificate_brain = certificates[i]
1210            certificate_doc = certificate_brain.getObject().getContent()
1211            pl = certificate_brain.getPath().split('/')
1212            data = {}
1213            for field in self.schema():
1214                data[field] = getattr(certificate_doc,field,None)
1215            data[self.key] = certificate_brain.getId
1216            ai = pl.index('academics')
1217            data['faculty'] = pl[ai +1]
1218            data['department'] = pl[ai +2]
1219            if clear:
1220                self.addRecord(**data)
1221            else:
1222                self.modifyRecord(**data)
1223        if pghandler: pghandler.finish()
1224    ###)
1225
1226    security.declarePrivate('notify_event_listener') ###(
1227    def notify_event_listener(self,event_type,object,infos):
1228        "listen for events"
1229        if not infos.has_key('rpath'):
1230            return
1231        pt = getattr(object,'portal_type',None)
1232        mt = getattr(object,'meta_type',None)
1233        if pt != 'Certificate':
1234            return
1235        data = {}
1236        rpl = infos['rpath'].split('/')
1237        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
1238            return
1239        certificate_id = object.getId()
1240        data[self.key] = certificate_id
1241        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
1242            try:
1243                self.addRecord(**data)
1244            except ValueError:
1245                return
1246            certificate_id = object.getId()
1247            doc = object.getContent()
1248            if doc is None:
1249                return
1250            for field in self.schema():
1251                data[field] = getattr(doc,field,None)
1252            data[self.key] = certificate_id
1253            ai = rpl.index('academics')
1254            data['faculty'] = rpl[ai +1]
1255            data['department'] = rpl[ai +2]
1256            self.modifyRecord(**data)
1257            return
1258        if event_type == "sys_del_object":
1259            self.deleteRecord(certificate_id)
1260            return
1261        if event_type == "sys_modify_object" and mt == 'Certificate':
1262            #from pdb import set_trace;set_trace()
1263            for field in self.schema():
1264                data[field] = getattr(object,field,None)
1265            certificate_id = object.aq_parent.getId()
1266            data[self.key] = certificate_id
1267            ai = rpl.index('academics')
1268            data['faculty'] = rpl[ai +1]
1269            data['department'] = rpl[ai +2]
1270            self.modifyRecord(**data)
1271    ###)
1272
1273
1274InitializeClass(CertificatesCatalog)
1275###)
1276
1277class CoursesCatalog(WAeUPTable): ###(
1278    security = ClassSecurityInfo()
1279
1280    meta_type = 'WAeUP Courses Catalog'
1281    name =  "courses_catalog"
1282    key = "code"
1283    def __init__(self,name=None):
1284        if name ==  None:
1285            name =  self.name
1286        WAeUPTable.__init__(self, name)
1287
1288    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
1289        """ clear the catalog, then re-index everything """
1290
1291        elapse = time.time()
1292        c_elapse = time.clock()
1293
1294        pgthreshold = self._getProgressThreshold()
1295        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
1296        self.refreshCatalog(clear=1, pghandler=handler)
1297
1298        elapse = time.time() - elapse
1299        c_elapse = time.clock() - c_elapse
1300
1301        RESPONSE.redirect(
1302            URL1 +
1303            '/manage_catalogAdvanced?manage_tabs_message=' +
1304            urllib.quote('Catalog Updated \n'
1305                         'Total time: %s\n'
1306                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
1307    ###)
1308
1309    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1310        if isinstance(name, str):
1311            name = (name,)
1312        courses = self.portal_catalog(portal_type="Course")
1313        num_objects = len(courses)
1314        if pghandler:
1315            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1316        for i in xrange(num_objects):
1317            if pghandler: pghandler.report(i)
1318            course_brain = courses[i]
1319            course_object = course_brain.getObject()
1320            pl = course_brain.getPath().split('/')
1321            data = {}
1322            cid = data[self.key] = course_brain.getId
1323            data['faculty'] = pl[-4]
1324            data['department'] = pl[-3]
1325            doc = course_object.getContent()
1326            for field in name:
1327                if field not in (self.key,'faculty','department'):
1328                    data[field] = getattr(doc,field)
1329            self.modifyRecord(**data)
1330        if pghandler: pghandler.finish()
1331    ###)
1332
1333    def refreshCatalog(self, clear=0, pghandler=None): ###(
1334        """ re-index everything we can find """
1335        if clear:
1336            self._catalog.clear()
1337        courses = self.portal_catalog(portal_type="Course")
1338        num_objects = len(courses)
1339        if pghandler:
1340            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1341        #from pdb import set_trace;set_trace()
1342        for i in xrange(num_objects):
1343            if pghandler: pghandler.report(i)
1344            course_brain = courses[i]
1345            course_doc = course_brain.getObject().getContent()
1346            pl = course_brain.getPath().split('/')
1347            data = {}
1348            for field in self.schema():
1349                data[field] = getattr(course_doc,field,None)
1350            data[self.key] = course_brain.getId
1351            ai = pl.index('academics')
1352            data['faculty'] = pl[ai +1]
1353            data['department'] = pl[ai +2]
1354            if clear:
1355                self.addRecord(**data)
1356            else:
1357                self.modifyRecord(**data)
1358        if pghandler: pghandler.finish()
1359    ###)
1360
1361    security.declarePrivate('notify_event_listener') ###(
1362    def notify_event_listener(self,event_type,object,infos):
1363        "listen for events"
1364        if not infos.has_key('rpath'):
1365            return
1366        pt = getattr(object,'portal_type',None)
1367        mt = getattr(object,'meta_type',None)
1368        if pt != 'Course':
1369            return
1370        data = {}
1371        rpl = infos['rpath'].split('/')
1372        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
1373            return
1374        course_id = object.getId()
1375        data[self.key] = course_id
1376        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
1377            try:
1378                self.addRecord(**data)
1379            except ValueError:
1380                return
1381            course_id = object.getId()
1382            doc = object.getContent()
1383            if doc is None:
1384                return
1385            for field in self.schema():
1386                data[field] = getattr(doc,field,None)
1387            data[self.key] = course_id
1388            ai = rpl.index('academics')
1389            data['faculty'] = rpl[ai +1]
1390            data['department'] = rpl[ai +2]
1391            self.modifyRecord(**data)
1392            return
1393        if event_type == "sys_del_object":
1394            self.deleteRecord(course_id)
1395            return
1396        if event_type == "sys_modify_object" and mt == 'Course':
1397            #from pdb import set_trace;set_trace()
1398            for field in self.schema():
1399                data[field] = getattr(object,field,None)
1400            course_id = object.aq_parent.getId()
1401            data[self.key] = course_id
1402            ai = rpl.index('academics')
1403            data['faculty'] = rpl[ai +1]
1404            data['department'] = rpl[ai +2]
1405            self.modifyRecord(**data)
1406    ###)
1407
1408
1409InitializeClass(CoursesCatalog)
1410###)
1411
1412class CourseResults(WAeUPTable): ###(
1413    security = ClassSecurityInfo()
1414
1415    meta_type = 'WAeUP Results Catalog'
1416    name = "course_results"
1417    key = "key" #student_id + level + course_id
1418    def __init__(self,name=None):
1419        if name ==  None:
1420            name = self.name
1421        WAeUPTable.__init__(self, name)
1422        self._queue = []
1423
1424    def addMultipleRecords(self, records): ###(
1425        """add many records"""
1426        existing_uids = []
1427        for data in records:
1428            uid = "%(student_id)s|%(level_id)s|%(course_id)s" % data
1429            data[self.key] = uid
1430            query = Eq(self.key, uid)
1431            res = self.course_results.evalAdvancedQuery(query)
1432            if len(res) > 0:
1433                rec = res[0]
1434                equal = True
1435                for attr in ('student_id','level_id','course_id'):
1436                    if getattr(rec,attr,'') != data[attr]:
1437                        equal = False
1438                        break
1439                if equal:
1440                    existing_uids.append(uid)
1441                    continue
1442            self.catalog_object(dict2ob(data), uid=uid)
1443        return existing_uids
1444    ###)
1445
1446    def deleteResultsHere(self,level_id,student_id): ###(
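        # Delete all course result records of a student for the given level.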
1447        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1448        course_results = self.course_results.evalAdvancedQuery(query)
1449        #import pdb;pdb.set_trace()
1450        for result in course_results:
1451            self.deleteRecord(result.key)
1452    ###)
1453
1454    def moveResultsHere(self,level,student_id): ###(
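        # Move course result documents from the level folder into this catalog;
        # a '_co' suffix on the object id marks a carry-over course. The objects
        # are deleted from the level folder afterwards.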
1455        #import pdb;pdb.set_trace()
1456        level_id = level.getId()
1457        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1458        course_results = self.course_results.evalAdvancedQuery(query)
1459        existing_courses = [cr.code for cr in course_results]
1460        to_delete = []
1461        for code,obj in level.objectItems():
1462            to_delete.append(code)
1463            carry_over = False
1464            if code.endswith('_co'):
1465                carry_over = True
1466                code  = code[:-3]
1467            if code in existing_courses:
1468                continue
1469            course_result_doc = obj.getContent()
1470            data = {}
1471            course_id = code
1472            for field in self.schema():
1473                data[field] = getattr(course_result_doc,field,'')
1474            data['key'] = key = "%(student_id)s|%(level_id)s|%(course_id)s" % vars()
1475            data['student_id'] = student_id
1476            data['level_id'] = level_id
1477            session_id = self.getLevelSession(level.getContent(),student_id,level_id)
1478            data['session_id'] = session_id
1479            #data['queue_status'] = OBJECT_CREATED
1480            data['code'] = course_id
1481            data['carry_over'] = carry_over
1482            self.catalog_object(dict2ob(data), uid=key)
1483        level.manage_delObjects(to_delete)
1484    ###)
1485
1486    def getCourses(self,student_id,level_id): ###(
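        # Collect a student's results for one level and return the total credits,
        # the accumulated weight * credits sum, the carry-over courses, and the
        # courses of semester 1, 2 and 3 (or unknown semester) as dictionaries.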
1487        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1488        course_results = self.course_results.evalAdvancedQuery(query)
1489        carry_overs = []
1490        normal1 = []
1491        normal2 = []
1492        normal3 = []
1493        total_credits = 0
1494        gpa = 0
1495        for brain in course_results:
1496            d = {}
1497
1498            for field in self.schema():
1499                d[field] = getattr(brain,field,None)
1500                if repr(d[field]) == 'Missing.Value': # ZCatalog's placeholder for missing metadata
1501                    d[field] = ''
1502            d['weight'] = ''
1503            d['grade'] = ''
1504            d['score'] = ''
1505
1506            if str(brain.credits).isdigit():
1507                credits = int(brain.credits)
1508                total_credits += credits
1509                score = getattr(brain,'score',0)
1510                if score and str(score).isdigit() and int(score) > 0:
1511                    score = int(score)
1512                    grade,weight = self.getGradesFromScore(score,'')
1513                    gpa += weight * credits
1514                    d['weight'] = weight
1515                    d['grade'] = grade
1516                    d['score'] = score
1517
1518            #if str(brain.ca1).isdigit() and str(brain.ca2).isdigit() and str(brain.exam).isdigit():
1519            #    d['score_calc'] = int(brain.ca1) + int(brain.ca2) + int(brain.exam)
1520            #else:
1521            #    d['score_calc'] = ''
1522            try:
1523                d['score_calc'] = float(brain.ca1) + float(brain.ca2) + float(brain.exam)
1524            except:
1525                d['score_calc'] = ''
1526
1527            if d['score_calc']:
1528                grade = self.getGradesFromScore(d['score_calc'],level_id)
1529                d['grade'] = grade
1530
1531            d['coe'] = ''
1532            if brain.core_or_elective:
1533                d['coe'] = 'Core'
1534            elif brain.core_or_elective == False:
1535                d['coe'] = 'Elective'
1536            code = d['id'] = brain.code
1537            d['code'] = code
1538            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1539            if res:
1540                course = res[0]
1541                d['title'] = course.title
1542                # The courses_catalog contains strings and integers in its semester field,
1543                # although the Course schema declares 'CPS Int Field'. Reindexing might fix this.
1544                d['semester'] = str(course.semester)
1545            else:
1546                d['title'] = "Course has been removed from course list"
1547                d['semester'] = ''
1548            if brain.carry_over:
1549                d['coe'] = 'CO'
1550                carry_overs.append(d)
1551            else:
1552                if d['semester'] == '1':
1553                    normal1.append(d)
1554
1555                elif d['semester'] == '2':
1556                    normal2.append(d)
1557                else:
1558                    normal3.append(d)
1559        #normal.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
1560        #                                "%(semester)s%(code)s" % y))
1561        carry_overs.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
1562                                             "%(semester)s%(code)s" % y))
1563        return total_credits,gpa,carry_overs,normal1,normal2,normal3
1564    ###)
1565
1566   
1567    # for transcript only
1568    def getAllCourses(self,student_id): ###(
1569        query = Eq('student_id',student_id)
1570        course_results = self.course_results.evalAdvancedQuery(query)
1571        courses = []
1572        for brain in course_results:
1573            d = {}
1574
1575            for field in self.schema():
1576                d[field] = getattr(brain,field,'')
1577
1578            d['weight'] = ''
1579            d['grade'] = ''
1580            d['score'] = ''
1581
1582            if str(brain.credits).isdigit():
1583                credits = int(brain.credits)
1584                score = getattr(brain,'score',0)
1585                if score and str(score).isdigit() and int(score) > 0:
1586                    score = int(score)
1587                    grade,weight = self.getGradesFromScore(score,'')
1588                    d['weight'] = weight
1589                    d['grade'] = grade
1590                    d['score'] = score
1591            d['coe'] = ''
1592            if brain.core_or_elective:
1593                d['coe'] = 'Core'
1594            elif brain.core_or_elective == False:
1595                d['coe'] = 'Elective'
1596            code = d['id'] = brain.code
1597            d['code'] = code
1598            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1599            if res:
1600                course = res[0]
1601                d['title'] = course.title
1602                # The courses_catalog contains strings and integers in its semester field,
1603                # although the Course schema declares 'CPS Int Field'. Reindexing might fix this.
1604                d['semester'] = str(course.semester)
1605            else:
1606                d['title'] = "Course has been removed from course list"
1607                d['semester'] = ''
1608            if brain.carry_over:
1609                d['coe'] = 'CO'
1610            courses.append(d)
1611        return courses
1612    ###)
1613   
1614    def getYearGroupAverage(self,session_id,level_id): ###(
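        # Average the overall marks (ca1 + ca2 + exam) of all results in the
        # given session and level, split by semester; returns the formatted
        # averages, the number of results and the raw mark lists per semester.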
1615        query = Eq('session_id',session_id) & Eq('level_id',level_id)
1616        course_results = self.course_results.evalAdvancedQuery(query)
1617        yga1 = 0
1618        yg1 = []
1619        counter1 = 0
1620        yga2 = 0
1621        yg2 = []
1622        counter2 = 0
1623        yga3 = 0
1624        yg3 = []
1625        counter3 = 0       
1626        #import pdb;pdb.set_trace()
1627        for brain in course_results:
1628            try:
1629                om = float(brain.ca1) + float(brain.ca2) + float(brain.exam)
1630                if not om > 0:
1631                    continue
1632                code = brain.code               
1633                res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1634                if res:
1635                    course = res[0]
1636                    # The courses_catalog contains strings and integers in its semester field,
1637                    # although the Course schema declares 'CPS Int Field'. Reindexing might fix this.
1638                    semester = str(course.semester)
1639                else:
1640                    semester = ''
1641                if semester == '1':
1642                    counter1 += 1
1643                    yga1 += om
1644                    yg1.append(om)
1645                elif semester == '2':
1646                    counter2 += 1
1647                    yga2 += om     
1648                    yg2.append(om)   
1649                elif semester == '3':
1650                    counter3 += 1
1651                    yga3 += om
1652                    yg3.append(om)
1653            except:
1654                continue               
1655        if counter1:
1656            yga1 /= counter1
1657            yga1 = '%.2f' % yga1   
1658        if counter2:
1659            yga2 /= counter2
1660            yga2 = '%.2f' % yga2   
1661        if counter3:
1662            yga3 /= counter3
1663            yga3 = '%.2f' % yga3                                   
1664        return yga1, yga2, yga3, counter1, counter2, counter3, yg1, yg2, yg3
1665    ###)
1666   
1667   
1668    #security.declarePublic("calculateCoursePosition")
1669    def calculateCoursePosition(self,session_id,level_id,code,score,semester=None):
1670        #"""calculate Course Position"""
1671        query = Eq('session_id',session_id) & Eq('level_id',level_id) & Eq('code',code)
1672        course_results = self.course_results.evalAdvancedQuery(query)
1673        ygc = []
1674        #import pdb;pdb.set_trace() 
1675        for brain in course_results:
1676            try:
1677                if not float(brain.ca1) + float(brain.ca2) + float(brain.exam) > 0:
1678                    continue
1679                #code = brain.code   
1680                if semester:
1681                    res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1682                    if res:
1683                        course = res[0]
1684                        # The courses_catalog contains strings and integers in its semester field,
1685                        # although the Course schema declares 'CPS Int Field'. Reindexing might fix this.
1686                        semester_from_course = str(course.semester)
1687                    else:
1688                        continue
1689                    if semester != semester_from_course:
1690                        continue
1691                ygc.append(float(brain.ca1) + float(brain.ca2) + float(brain.exam))
1692            except:
1693                continue     
1694        ygc.sort(reverse=True)
1695        if not len(ygc):
1696            return 'no result'
1697        #import pdb;pdb.set_trace()       
1698        for pos in range(len(ygc)):
1699            if ygc[pos] <= float(score):
1700                break
1701        output = {}   
1702        output['pos'] =  '%d of %d' % (pos+1,len(ygc))
1703        output['ygc'] = ygc
1704        return output
1705       
1706    security.declareProtected(ModifyPortalContent,"calculateAllCoursePositions")
1707    def calculateAllCoursePositions(self,session_id=None):
1708        """calculate All Course Positions"""
1709        logger = logging.getLogger('WAeUPTables.CourseResults.calculateAllCoursePositions')
1710        member = self.portal_membership.getAuthenticatedMember()
1711        logger.info('%s starts recalculation of positions in session %s' % (member,session_id))
1712        if session_id:
1713            query = Eq('session_id',session_id)
1714        else:
1715            return 'no session_id provided'
1716        course_results = self.course_results.evalAdvancedQuery(query)
1717        for brain in course_results:
1718            try:
1719                if not float(brain.ca1) + float(brain.ca2) + float(brain.exam) > 0:
1720                    data = {}
1721                    data[self.key] = brain.key
1722                    data['pic'] = ''
1723                    self.modifyRecord(**data)                   
1724                    continue
1725                res = self.courses_catalog.evalAdvancedQuery(Eq('code',brain.code))
1726                if res:
1727                    course = res[0]
1728                    semester_from_course = str(course.semester)
1729                else:
1730                    continue                   
1731                score = float(brain.ca1) + float(brain.ca2) + float(brain.exam)
1732                pic = self.calculateCoursePosition(session_id,brain.level_id,brain.code,score,semester_from_course)['pos']
1733                data = {}
1734                data[self.key] = brain.key
1735                data['pic'] = pic
1736                self.modifyRecord(**data)
1737            except:
1738                data = {}
1739                data[self.key] = brain.key
1740                data['pic'] = ''
1741                self.modifyRecord(**data)
1742                continue       
1743        logger.info('recalculation finished')             
1744        return 'ready'   
1745   
1746    def exportRemoveAllCourses(self,student_id,export=False,remove=False): ###(
1747        ""
1748        query = Eq('student_id',student_id)
1749        cr_catalog = self.course_results
1750        course_results = cr_catalog.evalAdvancedQuery(query)
1751        courses = []
1752        fields = self.schema()
1753        format = '"%(' + ')s","%('.join(fields) + ')s"'
1754        for brain in course_results:
1755            d = {}
1756            for field in fields:
1757                d[field] = getattr(brain,field,'')
1758            courses.append(format % d)
1759               
1760        if export:
1761            export_file = "%s/export/course_results_removed.csv" % (i_home)
1762            if not os.path.exists(export_file): 
1763                file_handler = open(export_file,"a")
1764                headline = ','.join(fields)
1765                file_handler.write(headline +'\n')
1766            else:
1767                file_handler = open(export_file,"a")
1768            for line in courses:
1769                file_handler.write(line +'\n')
1770
1771        if remove:
1772            for brain in course_results:
1773                key = getattr(brain,'key','')
1774                cr_catalog.deleteRecord(key)
1775       
1776        return courses
1777    ###)   
1778   
1779   
1780
1781InitializeClass(CourseResults)
1782###)
1783
1784class OnlinePaymentsImport(WAeUPTable): ###(
1785
1786    meta_type = 'WAeUP Online Payment Transactions'
1787    name = "online_payments_import"
1788    key = "order_id"
1789    def __init__(self,name=None):
1790        if name ==  None:
1791            name = self.name
1792        WAeUPTable.__init__(self, name)
1793
1794
1795InitializeClass(OnlinePaymentsImport)
1796###)
1797
1798class ReturningImport(WAeUPTable): ###(
1799
1800    meta_type = 'Returning Import Table'
1801    name = "returning_import"
1802    key = "matric_no"
1803    def __init__(self,name=None):
1804        if name ==  None:
1805            name = self.name
1806        WAeUPTable.__init__(self, name)
1807
1808
1809InitializeClass(ReturningImport)
1810###)
1811
1812class ResultsImport(WAeUPTable): ###(
1813
1814    meta_type = 'Results Import Table'
1815    name = "results_import"
1816    key = "key"
1817    def __init__(self,name=None):
1818        if name ==  None:
1819            name = self.name
1820        WAeUPTable.__init__(self, name)
1821
1822
1823InitializeClass(ResultsImport)
1824
1825###)
1826
1827class PaymentsCatalog(WAeUPTable): ###(
1828    security = ClassSecurityInfo()
1829
1830    meta_type = 'WAeUP Payments Catalog'
1831    name = "payments_catalog"
1832    key = "order_id"
1833    def __init__(self,name=None):
1834        if name ==  None:
1835            name = self.name
1836        WAeUPTable.__init__(self, name)
1837
1838
1839    security.declarePrivate('notify_event_listener') ###(
1840    def notify_event_listener(self,event_type,object,infos):
1841        "listen for events"
1842        if not infos.has_key('rpath'):
1843            return
1844        pt = getattr(object,'portal_type',None)
1845        mt = getattr(object,'meta_type',None)
1846        data = {}
1847        if pt != 'Payment':
1848            return
1849        if event_type == 'sys_del_object' and mt == 'CPS Proxy Folder':
1850            self.deleteRecord(object.getContent().order_id)
1851        if mt == 'CPS Proxy Folder':
1852            return # everything else is handled only on the real (content) object
1853        if event_type not in ('sys_modify_object',):
1854            return
1855        for field in self.schema():
1856            data[field] = getattr(object,field,'')
1857        rpl = infos['rpath'].split('/')
1858        #import pdb;pdb.set_trace()
1859        student_id = rpl[-4]
1860        data['student_id'] = student_id
1861        modified = False
1862        try:
1863            self.modifyRecord(**data)
1864            modified = True
1865        except KeyError:
1866            #logger = logging.getLogger('WAeUPTables.PaymentsCatalog.%s' % self.__name__)
1867            #logger.info("could not modify entry for %(student_id)s with %(order_id)s" % data)
1868            pass
1869        if not modified:
1870            try:
1871                self.addRecord(**data)
1872            except:
1873                logger = logging.getLogger('WAeUPTables.PaymentsCatalog.notify_event_listener')
1874                logger.info("could not add or modify entry for %(student_id)s with %(order_id)s" % data)
1875        ###)
1876
1877
1878    def exportRemoveAllPayments(self,student_id,export=False,remove=False): ###(
1879        ""
1880        query = Eq('student_id',student_id)
1881        pm_catalog = self.payments_catalog
1882        payments = pm_catalog.evalAdvancedQuery(query)
1883        payments_dic = []
1884        fields = self.schema()
1885        format = '"%(' + ')s","%('.join(fields) + ')s"'
1886        for brain in payments:
1887            d = {}
1888            for field in fields:
1889                d[field] = getattr(brain,field,'')
1890            payments_dic.append(format % d)
1891               
1892        if export:
1893            export_file = "%s/export/payments_removed.csv" % (i_home)
1894            if not os.path.exists(export_file): 
1895                file_handler = open(export_file,"a")
1896                headline = ','.join(fields)
1897                file_handler.write(headline +'\n')
1898            else:
1899                file_handler = open(export_file,"a")
1900            for line in payments_dic:
1901                file_handler.write(line +'\n')
1902
1903        if remove:
1904            for brain in payments:
1905                order_id = getattr(brain,'order_id','')
1906                pm_catalog.deleteRecord(order_id)
1907       
1908        return payments_dic
1909    ###)   
1910
1911    security.declareProtected(ModifyPortalContent,"dumpPayments")###(
1912    def dumpPayments(self,session_id=''):
1913        """dump all valid payments and combine with student data """
1914       
1915        if not session_id:
1916            session_id  = self.getSessionId()[0]
1917        member = self.portal_membership.getAuthenticatedMember()
1918        logger = logging.getLogger('WAeUPTables.WAeUPTable.dumpPayments')
1919        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
1920        export_file = "%s/export/valid_payments%s_%s.csv" % (i_home,session_id,current,)       
1921       
1922        pm_catalog = self.payments_catalog
1923        query = Eq('status','paid') & Eq('type','online') & Eq('session_id',session_id)
1924        payments = pm_catalog.evalAdvancedQuery(query)
1925        payments_dic = []
1926        s_catalog = self.students_catalog
1927        fields_pm = pm_catalog.schema()
1928        fields_s = s_catalog.schema()
1929        fields =  fields_pm + fields_s
1930       
1931        format = '"%(' + ')s","%('.join(fields) + ')s"'
1932        #import pdb;pdb.set_trace()
1933        for brain in payments:
1934            d = {}
1935            for field in fields_pm:
1936                d[field] = getattr(brain,field,'')
1937           
1938            student_id = getattr(brain,'student_id','')
1939            query = Eq('id',student_id)   
1940            student = s_catalog.evalAdvancedQuery(query)
1941            if student:
1942                for field in fields_s:
1943                    d[field] = getattr(student[0],field,'')               
1944            payments_dic.append(format % d)     
1945           
1946        if not os.path.exists(export_file): 
1947            file_handler = open(export_file,"a")
1948            headline = ','.join(fields)
1949            file_handler.write(headline +'\n')
1950        else:
1951            file_handler = open(export_file,"a")
1952        for line in payments_dic:
1953            file_handler.write(line +'\n')     
1954       
1955        return 'ready'       
1956           
1957
1958InitializeClass(PaymentsCatalog)
1959
1960###)
1961
1962class RemovedStudentIds(WAeUPTable): ###(
1963
1964    meta_type = 'WAeUP Removed StudentIds'
1965    name = "removed_student_ids"
1966    key = "id"
1967    def __init__(self,name=None):
1968        if name ==  None:
1969            name = self.name
1970        WAeUPTable.__init__(self, name)
1971
1972
1973InitializeClass(RemovedStudentIds)
1974
1975class AccommodationCatalog(WAeUPTable): ###(
1976
1977    meta_type = 'WAeUP Accommodation Catalog'
1978    name = "accommodation_catalog"
1979    key = "catkey" #student_id + session_id
1980    def __init__(self,name=None):
1981        if name ==  None:
1982            name = self.name
1983        WAeUPTable.__init__(self, name)
1984
1985
1986InitializeClass(AccommodationCatalog)
1987
1988###)
1989
1990# BBB: backwards-compatibility alias for the old class name
1991AccomodationTable = AccommodationTable