source: WAeUP_SRP/trunk/WAeUPTables.py @ 5136

Last change on this file since 5136 was 4693, checked in by Henrik Bettermann, 15 years ago

FCEOkene: add diploma programme application

  • Property svn:keywords set to Id
File size: 74.2 KB
1#-*- mode: python; mode: fold -*-
2# (C) Copyright 2005 AixtraWare <http://aixtraware.de>
3# Author: Joachim Schmitz <js@aixtraware.de>
4#
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License version 2 as published
7# by the Free Software Foundation.
8#
9# This program is distributed in the hope that it will be useful,
10# but WITHOUT ANY WARRANTY; without even the implied warranty of
11# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12# GNU General Public License for more details.
13#
14# You should have received a copy of the GNU General Public License
15# along with this program; if not, write to the Free Software
16# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
17# 02111-1307, USA.
18#
19# $Id: WAeUPTables.py 4693 2010-01-06 06:49:14Z henrik $
20
21from zope.interface import implements
22from Globals import InitializeClass
23from Products.ZCatalog.ZCatalog import ZCatalog
24from Products.ZCatalog.ProgressHandler import ZLogHandler
25from AccessControl import ClassSecurityInfo
26from Products.CMFCore.permissions import ModifyPortalContent
27from Products.CMFCore.utils import getToolByName
28from Products.CMFCore.CatalogTool import CatalogTool
29from Products.CPSSchemas.StorageAdapter import MappingStorageAdapter
30from Products.CPSSchemas.DataStructure import DataStructure
31from Products.CPSSchemas.DataModel import DataModel
32from Products.AdvancedQuery import Eq, Between, Le,In
33import urllib
34import DateTime,time
35import csv,re,os
36import logging
37import Globals
38p_home = Globals.package_home(globals())
39i_home = Globals.INSTANCE_HOME
40
41ADDING_SHEDULED = "adding_sheduled"
42OBJECT_CREATED = "object_created"
43NOT_OCCUPIED = 'not_occupied'
44
45from interfaces import IWAeUPTable
46
47class AttributeHolder(object):
48    pass
49
50def dict2ob(d):
51    ob = AttributeHolder()
52    for key, value in d.items():
53        setattr(ob, key, value)
54    return ob
55
56class WAeUPTable(ZCatalog): ###(
57
58    implements(IWAeUPTable)
59    security = ClassSecurityInfo()
60    meta_type = None
61
62    def __init__(self,name=None):
63        if name ==  None:
64            name = self.name
65        ZCatalog.__init__(self,name)
66
67    def refreshCatalog(self, clear=0, pghandler=None): ###(
68        """ don't refresh for a normal table """
69
70        if self.REQUEST and self.REQUEST.RESPONSE:
71            self.REQUEST.RESPONSE.redirect(
72              self.REQUEST['URL1'] +
73              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20refresh%20not%20implemented')
74
75###)
76
77    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None): ###(
78        """ clears the whole enchilada """
79
80        #if REQUEST and RESPONSE:
81        #    RESPONSE.redirect(
82        #      URL1 +
83        #      '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Clearing%20disabled')
84
85        self._catalog.clear()
86        if REQUEST and RESPONSE:
87            RESPONSE.redirect(
88              URL1 +
89              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20cleared')
90
91###)
92
93    def record2dict(self,fields,record,index): ###(
94        d = {}
95        for key in fields:
96            v = getattr(record, key, None)
97            v_dump = v
98            if key == 'sex':
99                if v == True:
100                    v_dump = 'F'
101                elif v == False:
102                    v_dump = 'M'
103                d[key] = v_dump
104            elif v:
105                if index == 'translate':
106                    if key == 'lga':
107                        v_dump = self.portal_vocabularies.local_gov_areas.get(v)
108                        if not v_dump:
109                            v_dump = v
110                    elif key == 'aos':
111                        v_dump = self.portal_vocabularies.aos.get(v)
112                d[key] = v_dump
113            else:
114                d[key] = ''
115        return d
116
117###)
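    # A short sketch of what record2dict returns, assuming a hypothetical
    # brain with a boolean 'sex' field and an 'lga' vocabulary key:
    #
    #   d = table.record2dict(('sex','lga'), record, 'translate')
    #
    # d['sex'] is 'F' for True and 'M' for False; with index 'translate',
    # d['lga'] holds the title from portal_vocabularies.local_gov_areas (or
    # the raw key if no title is found); empty values become ''.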
118
119    def addRecord(self, **data): ###(
120        # The uid is the value of the table's key field (e.g. "bed" for the accommodation table).
121        uid = data[self.key]
122        res = self.searchResults({"%s" % self.key : uid})
123        if len(res) > 0:
124            raise ValueError("A record with uid %s already exists" % uid)
125        self.catalog_object(dict2ob(data), uid=uid)
126        return uid
127
128###)
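    # A minimal usage sketch (hypothetical values; the key field depends on
    # the concrete table, e.g. 'bed' for portal_accommodation):
    #
    #   uid = table.addRecord(bed='A_101_A', student=NOT_OCCUPIED)
    #
    # addRecord raises ValueError if a record with this uid is already
    # catalogued.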
129
130    def deleteRecord(self, uid):
131        self.uncatalog_object(uid)
132
133    def getRecordByKey(self,key):
134        if not key:
135            return None
136        res = self.evalAdvancedQuery(Eq(self.key,key))
137        if res:
138            return res[0]
139        return None
140
141    def searchAndSetRecord(self, **data):
142        raise NotImplementedError
143
144    def modifyRecord(self, record=None, **data): ###(
145        #records = self.searchResults(uid=uid)
146        uid = data[self.key]
147        if record is None:
148            records = self.searchResults({"%s" % self.key : uid})
149            if len(records) > 1:
150                # Can not happen, but anyway...
151                raise ValueError("More than one record with uid %s" % uid)
152            if len(records) == 0:
153                raise KeyError("No record for uid %s" % uid)
154            record = records[0]
155        record_data = {}
156        for field in self.schema() + self.indexes():
157            record_data[field] = getattr(record, field)
158        # Add the updated data:
159        record_data.update(data)
160        self.catalog_object(dict2ob(record_data), uid)
161
162###)
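    # modifyRecord merges partial data into an existing record: it looks the
    # record up by the key field, copies all schema and index values, updates
    # them with **data and re-catalogues the result.  A sketch with a
    # hypothetical bed record:
    #
    #   table.modifyRecord(bed='A_101_A', student='X123456')
    #
    # A KeyError is raised if no record with that key exists.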
163
164    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
165        if isinstance(name, str):
166            name =  (name,)
167        paths = self._catalog.uids.items()
168        i = 0
169        #import pdb;pdb.set_trace()
170        for p,rid in paths:
171            i += 1
172            metadata = self.getMetadataForRID(rid)
173            record_data = {}
174            for field in name:
175                record_data[field] = metadata.get(field)
176            uid = metadata.get(self.key)
177            self.catalog_object(dict2ob(record_data), uid, idxs=name,
178                                update_metadata=0)
179
180###)
181
182    security.declareProtected(ModifyPortalContent,"exportAllRecords") ###(
183    def exportAllRecords(self):
184        "export a WAeUPTable"
185        #import pdb;pdb.set_trace()
186        fields = [field for field in self.schema()]
187        format = ','.join(['"%%(%s)s"' % fn for fn in fields])
188        csv = []
189        csv.append(','.join(['"%s"' % fn for fn in fields]))
190        for uid in self._catalog.uids:
191            records = self.searchResults({"%s" % self.key : uid})
192            if len(records) > 1:
193                # Can not happen, but anyway...
194                raise ValueError("More than one record with uid %s" % uid)
195            if len(records) == 0:
196                raise KeyError("No record for uid %s" % uid)
197            rec = records[0]
198            csv.append(format % rec)
199        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
200        open("%s/import/%s-%s.csv" % (i_home,self.getId(),current),"w+").write('\n'.join(csv))
201
202###)
203
204    security.declareProtected(ModifyPortalContent,"dumpAll")###(
205    def dumpAll(self,index=None,value=None):
206        """dump all data in the table to a csv"""
207        member = self.portal_membership.getAuthenticatedMember()
208        logger = logging.getLogger('WAeUPTables.WAeUPTable.dumpAll')
209        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
210        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
211        res_list = []
212        lines = []
213        if hasattr(self,"export_keys"):
214            fields = self.export_keys
215        else:
216            fields = []
217            for f in self.schema():
218                fields.append(f)
219        headline = ','.join(fields)
220        out = open(export_file,"wb")
221        out.write(headline +'\n')
222        out.close()
223        out = open(export_file,"a")
224        csv_writer = csv.DictWriter(out,fields,)
225        if index is not None and value is not None:
226            records = self.evalAdvancedQuery(Eq(index,value))
227        else:
228            records = self()
229        nr2export = len(records)
230        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
231        chunk = 2000
232        total = 0
233        start = DateTime.DateTime().timeTime()
234        start_chunk = DateTime.DateTime().timeTime()
235        for record in records:
236            not_all = False
237            d = self.record2dict(fields,record,index)
238            lines.append(d)
239            total += 1
240            if total and not total % chunk or total == len(records):
241                csv_writer.writerows(lines)
242                anz = len(lines)
243                logger.info("wrote %(anz)d  total written %(total)d" % vars())
244                end_chunk = DateTime.DateTime().timeTime()
245                duration = end_chunk-start_chunk
246                per_record = duration/anz
247                till_now = end_chunk - start
248                average_per_record = till_now/total
249                estimated_end = DateTime.DateTime(start + average_per_record * nr2export)
250                estimated_end = estimated_end.strftime("%H:%M:%S")
251                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
252                start_chunk = DateTime.DateTime().timeTime()
253                lines = []
254        end = DateTime.DateTime().timeTime()
255        logger.info('total time %6.2f m' % ((end-start)/60))
256        import os
257        filename, extension = os.path.splitext(export_file)
258        from subprocess import call
259        msg = "wrote %(total)d records to %(export_file)s" % vars()
260        #try:
261        #    retcode = call('gzip %s' % (export_file),shell=True)
262        #    if retcode == 0:
263        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
264        #except OSError, e:
265        #    retcode = -99
266        #    logger.info("zip failed with %s" % e)
267        logger.info(msg)
268        args = {'portal_status_message': msg}
269        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
270        url = self.REQUEST.get('URL2')
271        return 'ready'
272        #return self.REQUEST.RESPONSE.redirect(url)
273    ###)
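    # The progress estimate above is simple proportional arithmetic: with
    # `total` records written after `till_now` seconds, the expected finish
    # time is start + (till_now / total) * nr2export.  For example, 4000 of
    # 10000 records in 20 seconds gives 0.005 s per record and an estimated
    # end 50 seconds after the start.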
274
275    security.declarePrivate("_import_old") ###(
276    def _import_old(self,filename,schema,layout, mode,logger):
277        "import data from csv"
278        import transaction
279        import random
280        pm = self.portal_membership
281        member = pm.getAuthenticatedMember()
282        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
283        import_fn = "%s/import/%s.csv" % (i_home,filename)
284        imported_fn = "%s/import/%s_imported%s.csv" % (i_home,filename,current)
285        not_imported_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
286        start = True
287        tr_count = 1
288        total_imported = 0
289        total_not_imported = 0
290        total = 0
291        iname =  "%s" % filename
292        not_imported = []
293        imported = []
294        valid_records = []
295        invalid_records = []
296        d = {}
297        d['mode'] = mode
298        d['imported'] = total_imported
299        d['not_imported'] = total_not_imported
300        d['valid_records'] = valid_records
301        d['invalid_records'] = invalid_records
302        d['import_fn'] = import_fn
303        d['imported_fn'] = imported_fn
304        d['not_imported_fn'] = not_imported_fn
305        if schema is None:
306            em = 'No schema specified'
307            logger.error(em)
308            return d
309        if layout is None:
310            em = 'No layout specified'
311            logger.error(em)
312            return d
313        validators = {}
314        for widget in layout.keys():
315            try:
316                validators[widget] = layout[widget].validate
317            except AttributeError:
318                logger.info('%s has no validate attribute' % widget)
319                return d
320        # if mode == 'edit':
321        #     importer = self.importEdit
322        # elif mode == 'add':
323        #     importer = self.importAdd
324        # else:
325        #     importer = None
326        try:
327            items = csv.DictReader(open(import_fn,"rb"),
328                                   dialect="excel",
329                                   skipinitialspace=True)
330        except:
331            em = 'Error reading %s.csv' % filename
332            logger.error(em)
333            return d
334        #import pdb;pdb.set_trace()
335        for item in items:
336            if start:
337                start = False
338                logger.info('%s starts import from %s.csv' % (member,filename))
339                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
340                attrs = csv.reader(open("%s/import/%s.csv" % (i_home,filename),"rb"),
341                                   dialect="excel",
342                                   skipinitialspace=True).next()
343                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
344                diff2schema = set(import_keys).difference(set(schema.keys()))
345                diff2layout = set(import_keys).difference(set(layout.keys()))
346                if diff2layout:
347                    em = "non-ignorable key(s) %s found in heading" % diff2layout
348                    logger.info(em)
349                    return d
350                s = ','.join(['"%s"' % fn for fn in import_keys])
351                open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
352                #s = '"id",' + s
353                open(imported_fn,"a").write(s + '\n')
354                format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
355                format_error = format + ',"%(Error)s"'
356                #format = '"%(id)s",'+ format
357                adapters = [MappingStorageAdapter(schema, item)]
358            dm = DataModel(item, adapters,context=self)
359            ds = DataStructure(data=item,datamodel=dm)
360            error_string = ""
361            #import pdb;pdb.set_trace()
362            for k in import_keys:
363                if not validators[k](ds,mode=mode):
364                    error_string += " %s : %s" % (k,ds.getError(k))
365            # if not error_string and importer:
366            #     item.update(dm)
367            #     item['id'],error = importer(item)
368            #     if error:
369            #         error_string += error
370            if error_string:
371                item['Error'] = error_string
372                invalid_records.append(dm)
373                not_imported.append(format_error % item)
374                total_not_imported += 1
375            else:
376                em = format % item
377                valid_records.append(dm)
378                imported.append(em)
379                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
380                tr_count += 1
381                total_imported += 1
382            total += 1
383        if len(imported) > 0:
384            open(imported_fn,"a").write('\n'.join(imported))
385        if len(not_imported) > 0:
386            open(not_imported_fn,"a").write('\n'.join(not_imported))
387        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
388        d['imported'] = total_imported
389        d['not_imported'] = total_not_imported
390        d['valid_records'] = valid_records
391        d['invalid_records'] = invalid_records
392        d['imported_fn'] = imported_fn
393        d['not_imported_fn'] = not_imported_fn
394        #logger.info(em)
395        return d
396    ###)
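    # _import_old reports its outcome through the returned dict, roughly:
    #
    #   {'mode': mode,
    #    'imported': <number of valid rows>,
    #    'not_imported': <number of rejected rows>,
    #    'valid_records': [<DataModel>, ...],
    #    'invalid_records': [<DataModel>, ...],
    #    'import_fn': ..., 'imported_fn': ..., 'not_imported_fn': ...}
    #
    # importCSV below iterates over d['valid_records'] and adds or edits one
    # catalog record per DataModel.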
397
398    security.declarePrivate("_import_new") ###(
399    def _import_new(self,csv_items,schema, layout, mode,logger):
400        "import data from a csv.DictReader instance"
401        start = True
402        tr_count = 1
403        total_imported = 0
404        total_not_imported = 0
405        total = 0
406        #iname =  "%s" % filename # 'filename' is not passed to this method
407        not_imported = []
408        valid_records = []
409        invalid_records = []
410        duplicate_records = []
411        d = {}
412        d['mode'] = mode
413        d['valid_records'] = valid_records
414        d['invalid_records'] = invalid_records
415        d['duplicate_records'] = duplicate_records
416        # d['import_fn'] = import_fn
417        # d['imported_fn'] = imported_fn
418        # d['not_imported_fn'] = not_imported_fn
419        validators = {}
420        for widget in layout.keys():
421            try:
422                validators[widget] = layout[widget].validate
423            except AttributeError:
424                logger.info('%s has no validate attribute' % widget)
425                return d
426        for item in csv_items:
427            if start:
428                start = False
429                logger.info('start of csv import')
430                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
431                attrs = item.keys()
432                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
433                diff2schema = set(import_keys).difference(set(schema.keys()))
434                diff2layout = set(import_keys).difference(set(layout.keys()))
435                if diff2layout:
436                    em = "non-ignorable key(s) %s found in heading" % diff2layout
437                    logger.info(em)
438                    return d
439                # s = ','.join(['"%s"' % fn for fn in import_keys])
440                # open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
441                # #s = '"id",' + s
442                # open(imported_fn,"a").write(s + '\n')
443                # format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
444                # format_error = format + ',"%(Error)s"'
445                # #format = '"%(id)s",'+ format
446                adapters = [MappingStorageAdapter(schema, item)]
447            dm = DataModel(item, adapters,context=self)
448            ds = DataStructure(data=item,datamodel=dm)
449            error_string = ""
450            for k in import_keys:
451                if not validators[k](ds,mode=mode):
452                    error_string += " %s : %s" % (k,ds.getError(k))
453            if error_string:
454                item['Error'] = error_string
455                #invalid_records.append(dm)
456                invalid_records.append(item)
457                total_not_imported += 1
458            else:
459                #em = format % item # 'format' is only built in _import_old
460                valid_records.append(dm)
461                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
462                tr_count += 1
463                total_imported += 1
464            total += 1
465        # if len(imported) > 0:
466        #     open(imported_fn,"a").write('\n'.join(imported))
467        # if len(not_imported) > 0:
468        #     open(not_imported_fn,"a").write('\n'.join(not_imported))
469        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
470        d['imported'] = total_imported
471        d['not_imported'] = total_not_imported
472        d['valid_records'] = valid_records
473        d['invalid_records'] = invalid_records
474        return d
475    ###)
476
477    security.declarePublic("missingValue")###(
478    def missingValue(self):
479        from Missing import MV
480        return MV
481    ###)
482###)
483
484class AccommodationTable(WAeUPTable): ###(
485
486    meta_type = 'WAeUP Accommodation Tool'
487    name = "portal_accommodation"
488    key = "bed"
489    not_occupied = NOT_OCCUPIED
490    def __init__(self,name=None):
491        if name ==  None:
492            name = self.name
493        WAeUPTable.__init__(self, name)
494
495    def searchAndReserveBed(self, student_id,bed_type,random_order=False): ###(
496        logger = logging.getLogger('WAeUPTables.AccommodationTable.searchAndReserveBed')
497        records = self.evalAdvancedQuery(Eq('student',student_id))
498        if len(records) == 1:
499            #return -1,"Student with Id %s already booked bed %s." % (student_id,records[0].bed)
500            logger.info('%s found (reserved) bed %s' % (student_id,records[0].bed))
501            return -1,records[0].bed
502        elif len(records) > 1:
503            logger.info('%s found more than one (reserved) bed' % (student_id))
504            return -3,'more than one bed'
505        query = Eq('bed_type',bed_type) & Eq('student',NOT_OCCUPIED)
506        records = self.evalAdvancedQuery(query,sortSpecs=('sort_id','bed'))
507        if len(records) == 0:
508            logger.info('no bed %s available for %s' % (bed_type,student_id))
509            return -2,"no bed"
510        if random_order:
511            import random
512            bed_no = random.randint(0,len(records)-1)
513        else:
514            bed_no = 0
515        rec = records[bed_no]
516        self.modifyRecord(bed=rec.bed,student=student_id)
517        logger.info('%s booked bed %s' % (student_id,rec.bed))
518        return 1,rec.bed
519    ###)
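    # Return codes of searchAndReserveBed:
    #    1, bed                  - a free bed of the requested type was booked
    #   -1, bed                  - the student already holds a (reserved) bed
    #   -2, "no bed"             - no free bed of this type
    #   -3, "more than one bed"  - data error, several beds already booked
    #
    # A minimal sketch (hypothetical student id and bed type):
    #
    #   code, bed = self.portal_accommodation.searchAndReserveBed('X123456','re_male')
    #   if code < 0:
    #       ...  # show an error message instead of a booking slip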
520
521
522InitializeClass(AccommodationTable)
523
524###)
525
526class PinTable(WAeUPTable): ###(
527    from ZODB.POSException import ConflictError
528    security = ClassSecurityInfo()
529    meta_type = 'WAeUP Pin Tool'
530    name = "portal_pins"
531    key = 'pin'
532
533    def __init__(self,name=None):
534        if name ==  None:
535            name = self.name
536        WAeUPTable.__init__(self, name)
537
538    security.declareProtected(ModifyPortalContent,"dumpAll")###(
539    def dumpAll(self,include_unused=None,index=None):
540        """dump all data in the table to a csv"""
541        member = self.portal_membership.getAuthenticatedMember()
542        logger = logging.getLogger('WAeUPTables.PinTable.dumpAll')
543        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
544        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
545        res_list = []
546        lines = []
547        if hasattr(self,"export_keys"):
548            fields = self.export_keys
549        else:
550            fields = []
551            for f in self.schema():
552                fields.append(f)
553        headline = ','.join(fields)
554        out = open(export_file,"wb")
555        out.write(headline +'\n')
556        out.close()
557        out = open(export_file,"a")
558        csv_writer = csv.DictWriter(out,fields,)
559        if include_unused is not None and str(member) not in ('admin','joachim'):
560            logger.info('%s tries to dump pintable with unused pins' % (member))
561            return
562        if include_unused is not None:
563            records = self()
564        else:
565            records = self.evalAdvancedQuery(~Eq('student',''))
566        nr2export = len(records)
567        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
568        chunk = 2000
569        total = 0
570        start = DateTime.DateTime().timeTime()
571        start_chunk = DateTime.DateTime().timeTime()
572        for record in records:
573            not_all = False
574            d = self.record2dict(fields,record,index)
575            lines.append(d)
576            total += 1
577            if total and not total % chunk or total == len(records):
578                csv_writer.writerows(lines)
579                anz = len(lines)
580                logger.info("wrote %(anz)d  total written %(total)d" % vars())
581                end_chunk = DateTime.DateTime().timeTime()
582                duration = end_chunk-start_chunk
583                per_record = duration/anz
584                till_now = end_chunk - start
585                average_per_record = till_now/total
586                estimated_end = DateTime.DateTime(start + average_per_record * nr2export)
587                estimated_end = estimated_end.strftime("%H:%M:%S")
588                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
589                start_chunk = DateTime.DateTime().timeTime()
590                lines = []
591        end = DateTime.DateTime().timeTime()
592        logger.info('total time %6.2f m' % ((end-start)/60))
593        import os
594        filename, extension = os.path.splitext(export_file)
595        from subprocess import call
596        msg = "wrote %(total)d records to %(export_file)s" % vars()
597        #try:
598        #    retcode = call('gzip %s' % (export_file),shell=True)
599        #    if retcode == 0:
600        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
601        #except OSError, e:
602        #    retcode = -99
603        #    logger.info("zip failed with %s" % e)
604        logger.info(msg)
605        args = {'portal_status_message': msg}
606        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
607        url = self.REQUEST.get('URL2')
608        return self.REQUEST.RESPONSE.redirect(url)
609    ###)
610
611
612
613    def searchAndSetRecord(self, uid, student_id,prefix):
614
615        # The following line must be activated after resetting the
616        # the portal_pins table. This is to avoid duplicate entries
617        # and disable duplicate payments.
618
619        #student_id = student_id.upper()
620
621        #records = self.searchResults(student = student_id)
622        #if len(records) > 0 and prefix in ('CLR','APP'):
623        #    for r in records:
624        #        if r.pin != uid and r.prefix_batch.startswith(prefix):
625        #            return -2
626        records = self.searchResults({"%s" % self.key : uid})
627        if len(records) > 1:
628            # Can not happen, but anyway...
629            raise ValueError("More than one record with uid %s" % uid)
630        if len(records) == 0:
631            return -1,None
632        record = records[0]
633        if record.student == "":
634            record_data = {}
635            for field in self.schema() + self.indexes():
636                record_data[field] = getattr(record, field)
637            # Add the updated data:
638            record_data['student'] = student_id
639            try:
640                self.catalog_object(dict2ob(record_data), uid)
641                return 1,record
642            except self.ConflictError:
643                return 2,record
644        if record.student.upper() != student_id.upper():
645            return 0,record
646        if record.student.upper() == student_id.upper():
647            return 2,record
648        return -3,record
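    # searchAndSetRecord return codes:
    #   -1, None   - no pin with this uid exists
    #    1, record - the pin was unused and is now bound to student_id
    #    2, record - the pin is already bound to the same student (or a
    #                ConflictError occurred while writing)
    #    0, record - the pin is already bound to a different student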
649InitializeClass(PinTable)
650###)
651
652class PumeResultsTable(WAeUPTable): ###(
653
654    meta_type = 'WAeUP PumeResults Tool'
655    name = "portal_pumeresults"
656    key = "jamb_reg_no"
657    def __init__(self,name=None):
658        if name ==  None:
659            name = self.name
660        WAeUPTable.__init__(self, name)
661
662
663InitializeClass(PumeResultsTable)
664
665###)
666
667class ApplicantsCatalog(WAeUPTable): ###(
668
669    meta_type = 'WAeUP Applicants Catalog'
670    name = "applicants_catalog"
671    key = "reg_no"
672    security = ClassSecurityInfo()
673    #export_keys = (
674    #               "reg_no",
675    #               "status",
676    #               "lastname",
677    #               "sex",
678    #               "date_of_birth",
679    #               "lga",
680    #               "email",
681    #               "phone",
682    #               "passport",
683    #               "entry_mode",
684    #               "pin",
685    #               "screening_type",
686    #               "registration_date",
687    #               "testdate",
688    #               "application_date",
689    #               "screening_date",
690    #               "faculty",
691    #               "department",
692    #               "course1",
693    #               "course2",
694    #               "course3",
695    #               "eng_score",
696    #               "subj1",
697    #               "subj1score",
698    #               "subj2",
699    #               "subj2score",
700    #               "subj3",
701    #               "subj3score",
702    #               "aggregate",
703    #               "course_admitted",
704    #               )
705
706    def __init__(self,name=None):
707        if name ==  None:
708            name = self.name
709        WAeUPTable.__init__(self, name)
710
711    security.declareProtected(ModifyPortalContent,"new_importCSV")###(
712    def new_importCSV(self,filename="JAMB_data",
713                  schema_id="application",
714                  layout_id="import_application",
715                  mode='add'):
716        """ import JAMB data """
717        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
718        pm = self.portal_membership
719        member = pm.getAuthenticatedMember()
720        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
721        lock_fn = "%s/import/%s_import_lock" % (i_home,filename)
722        import_fn = "%s/import/%s.csv" % (i_home,filename)
723        if mode not in ('add','edit'):
724            logger.info("invalid mode: %s" % mode)
725        if os.path.exists(lock_fn):
726            logger.info("import of %(import_fn)s already in progress" % vars())
727            return
728        lock_file = open(lock_fn,"w")
729        lock_file.write("%(current)s \n" % vars())
730        lock_file.close()
731        invalid_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
732        duplicate_fn = "%s/import/%s_duplicates%s.csv" % (i_home,filename,current)
733        stool = getToolByName(self, 'portal_schemas')
734        ltool = getToolByName(self, 'portal_layouts')
735        schema = stool._getOb(schema_id)
736        if schema is None:
737            em = 'No such schema %s' % schema_id
738            logger.error(em)
739            return
740        for postfix in ('_import',''):
741            layout_name = "%(layout_id)s%(postfix)s" % vars()
742            if hasattr(ltool,layout_name):
743                break
744        layout = ltool._getOb(layout_name)
745        if layout is None:
746            em = 'No such layout %s' % layout_id
747            logger.error(em)
748            return
749        try:
750            csv_file = csv.DictReader(open(import_fn,"rb"))
751        except:
752            em = 'Error reading %s.csv' % filename
753            logger.error(em)
754            return
755        d = self._import_new(csv_file,schema,layout,mode,logger)
756        imported = []
757        edited = []
758        duplicates = []
759        not_found = []
760        if len(d['valid_records']) > 0:
761            for record in d['valid_records']:
762                #import pdb;pdb.set_trace()
763                if mode == "add":
764                    try:
765                        self.addRecord(**dict(record.items()))
766                        imported.append(dict(record.items()))
767                        logger.info("added %s" % record.items())
768                    except ValueError:
769                        duplicates.append(dict(record.items()))
770                        logger.info("duplicate %s" % record.items())
771                elif mode == "edit":
772                    try:
773                        self.modifyRecord(**dict(record.items()))
774                        edited.append(dict(record.items()))
775                        logger.info("edited %s" % record.items())
776                    except KeyError:
777                        not_found.append(dict(record.items()))
778                        logger.info("not found %s" % record.items())
779        invalid = d['invalid_records']
780        for itype in ("imported","edited","not_found","duplicates","invalid"):
781            outlist = locals()[itype]
782            if len(outlist):
783                d = {}
784                for k in outlist[0].keys():
785                    d[k] = k
786                outlist.insert(0,d)
787                outfile = open("file_name_%s" % itype,'w')
788                csv.DictWriter(outfile,outlist[0].keys()).writerows(outlist)
789                logger.info("wrote %d %s records" % (len(outlist) - 1,itype))
790###)
791
792    security.declareProtected(ModifyPortalContent,"importCSV")###(
793    def importCSV(self,filename="JAMB_data",
794                  schema_id="application",
795                  layout_id="application_pce",
796                  mode='add'):
797        """ import JAMB data """
798        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
799        stool = getToolByName(self, 'portal_schemas')
800        ltool = getToolByName(self, 'portal_layouts')
801        schema = stool._getOb(schema_id)
802        if schema is None:
803            em = 'No such schema %s' % schema_id
804            logger.error(em)
805            return
806        layout = ltool._getOb(layout_id)
807        if layout is None:
808            em = 'No such layout %s' % layout_id
809            logger.error(em)
810            return
811        d = self._import_old(filename,schema,layout,mode,logger)
812        if len(d['valid_records']) > 0:
813            for record in d['valid_records']:
814                #import pdb;pdb.set_trace()
815                if mode == "add":
816                    self.addRecord(**dict(record.items()))
817                    logger.info("added %s" % record.items())
818                elif mode == "edit":
819                    self.modifyRecord(**dict(record.items()))
820                    logger.info("edited %s" % record.items())
821                else:
822                    logger.info("invalid mode: %s" % mode)
823        logger.info("%(mode)sed %(imported)d records, invalid written to %(not_imported_fn)s" % d)
824    ###)
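    # importCSV expects the file to be present in the instance import
    # directory, i.e. <INSTANCE_HOME>/import/<filename>.csv.  A sketch with a
    # hypothetical file name:
    #
    #   self.applicants_catalog.importCSV(filename='JAMB_data_2009',
    #                                     schema_id='application',
    #                                     layout_id='application_pce',
    #                                     mode='add')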
825
826InitializeClass(ApplicantsCatalog)
827
828###)
829
830class StudentsCatalog(WAeUPTable): ###(
831    security = ClassSecurityInfo()
832
833    meta_type = 'WAeUP Students Catalog'
834    name = "students_catalog"
835    key = "id"
836    affected_types = {   ###(
837                      'StudentApplication':
838                      {'id': 'application',
839                       'fields':
840                       ('jamb_reg_no',
841                        'entry_mode',
842                        #'entry_level',
843                        'entry_session',
844                       )
845                      },
846                      'StudentClearance':
847                      {'id': 'clearance',
848                       'fields':
849                       ('matric_no',
850                        'lga',
851                       )
852                      },
853                      'StudentPersonal':
854                      {'id': 'personal',
855                       'fields':
856                       ('name',
857                        'sex',
858                        'perm_address',
859                        'email',
860                        'phone',
861                       )
862                      },
863                      'StudentStudyCourse':
864                      {'id': 'study_course',
865                       'fields':
866                       ('course', # study_course
867                        'faculty', # from certificate
868                        'department', # from certificate
869                        'end_level', # from certificate
870                        'level', # current_level
871                        'mode',  # from certificate
872                        'session', # current_session
873                        'verdict', # current_verdict
874                       )
875                      },
876                     }
877    ###)
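    # affected_types drives both reindexIndex and notify_event_listener below:
    # the sub-object id ('application', 'clearance', ...) names the CPS
    # document of a student to read, and each listed field is taken either
    # from a get_from_doc_<field> accessor, if one exists, or directly from
    # the document attribute of the same name.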
878
879    def __init__(self,name=None):
880        if name ==  None:
881            name = self.name
882        WAeUPTable.__init__(self, name)
883        return
884
885    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None):
886        """ clears the whole enchilada """
887        self._catalog.clear()
888
889        if REQUEST and RESPONSE:
890            RESPONSE.redirect(
891              URL1 +
892              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Cleared')
893
894    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
895        """ clear the catalog, then re-index everything """
896
897        elapse = time.time()
898        c_elapse = time.clock()
899
900        pgthreshold = self._getProgressThreshold()
901        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
902        self.refreshCatalog(clear=1, pghandler=handler)
903
904        elapse = time.time() - elapse
905        c_elapse = time.clock() - c_elapse
906
907        RESPONSE.redirect(
908            URL1 +
909            '/manage_catalogAdvanced?manage_tabs_message=' +
910            urllib.quote('Catalog Updated \n'
911                         'Total time: %s\n'
912                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
913    ###)
914
915    def fill_certificates_dict(self): ###(
916        "return certificate data in  dict"
917        certificates_brains = self.portal_catalog(portal_type ='Certificate')
918        d = {}
919        for cb in certificates_brains:
920            certificate_doc = cb.getObject().getContent()
921            cb_path = cb.getPath().split('/')
922            ld = {}
923            ld['faculty'] = cb_path[-4]
924            ld['department'] = cb_path[-3]
925            ld['end_level'] = getattr(certificate_doc,'end_level','999')
926            ld['study_mode'] = getattr(certificate_doc,'study_mode','')
927            d[cb.getId] = ld
928        return d
929    ###)
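    # The returned mapping is kept on the volatile attribute _v_certificates
    # and has the shape (certificate code and path segments are only
    # illustrative):
    #
    #   {'BSCBIO': {'faculty': 'science', 'department': 'bio',
    #               'end_level': '500', 'study_mode': 'ug_ft'}, ...}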
930
931    def get_from_doc_department(self,doc,cached_data={}): ###(
932        "return the students department"
933        if doc is None:
934            return None
935        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
936            return self._v_certificates[doc.study_course]['department']
937        certificate_res = self.portal_catalog(id = doc.study_course)
938        if len(certificate_res) != 1:
939            return None
940        return certificate_res[0].getPath().split('/')[-3]
941
942    def get_from_doc_faculty(self,doc,cached_data={}):
943        "return the students faculty"
944        if doc is None:
945            return None
946        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
947            return self._v_certificates[doc.study_course]['faculty']
948        certificate_res = self.portal_catalog(id = doc.study_course)
949        if len(certificate_res) != 1:
950            return None
951        return certificate_res[0].getPath().split('/')[-4]
952
953    def get_from_doc_end_level(self,doc,cached_data={}):
954        "return the students end_level"
955        if doc is None:
956            return None
957        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
958            return self._v_certificates[doc.study_course]['end_level']
959        certificate_res = self.portal_catalog(id = doc.study_course)
960        if len(certificate_res) != 1:
961            return None
962        return getattr(certificate_res[0].getObject().getContent(),'end_level','unknown')
963
964    def get_from_doc_level(self,doc,cached_data={}):
965        "return the students level"
966        if doc is None:
967            return None
968        return getattr(doc,'current_level',None)
969
970    #def get_from_doc_mode(self,doc,cached_data={}):
971    #    "return the students mode"
972    #    if doc is None:
973    #        return None
974    #    cm = getattr(doc,'current_mode',None)
975    #    return cm
976   
977    def get_from_doc_mode(self,doc,cached_data={}):
978        "return the students mode"
979        if doc is None:
980            return None
981        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
982            return self._v_certificates[doc.study_course]['study_mode']
983        certificate_res = self.portal_catalog(id = doc.study_course)
984        if len(certificate_res) != 1:
985            return None
986        return getattr(certificate_res[0].getObject().getContent(),'study_mode','unknown')   
987
988
989    def get_from_doc_session(self,doc,cached_data={}):
990        "return the students current_session"
991        if doc is None:
992            return None
993        return getattr(doc,'current_session',None)
994
995    def get_from_doc_entry_session(self,doc,cached_data={}):
996        "return the students entry_session"
997        if doc is None:
998            return None
999        es = getattr(doc,'entry_session',None)
1000        if es is not None and len(es) < 3:
1001            return es
1002        elif es is not None and len(es) == 9:
1003            return es[2:4]   
1004        try:
1005            digit = int(doc.jamb_reg_no[0])
1006        except:
1007            return "-1"
1008        if digit < 9:
1009            return "0%c" % doc.jamb_reg_no[0]
1010        return "9%c" % doc.jamb_reg_no[0]
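    # Example of the fallback heuristic above, assuming a hypothetical
    # jamb_reg_no of '74123456': the leading digit 7 is below 9, so the
    # entry_session becomes '07'; a reg_no starting with '9' yields '99'.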
1011
1012    def get_from_doc_course(self,doc,cached_data={}):
1013        "return the students study_course"
1014        if doc is None:
1015            return None
1016        return getattr(doc,'study_course',None)
1017
1018    def get_from_doc_name(self,doc,cached_data={}):
1019        "return the students name from the personal"
1020        if doc is None:
1021            return None
1022        return "%s %s %s" % (doc.firstname,doc.middlename,doc.lastname)
1023
1024    def get_from_doc_verdict(self,doc,cached_data={}):
1025        "return the students study_course"
1026        if doc is None:
1027            return None
1028        return getattr(doc,'current_verdict',None)
1029    ###)
1030
1031    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1032        if not hasattr(self,'_v_certificates'):
1033            self._v_certificates = self.fill_certificates_dict()
1034        if isinstance(name, str):
1035            name = (name,)
1036        reindextypes = {}
1037        reindex_special = []
1038        for n in name:
1039            if n in ("review_state",):
1040                reindex_special.append(n)
1041            else:
1042                for pt in self.affected_types.keys():
1043                    if n in self.affected_types[pt]['fields']:
1044                        if reindextypes.has_key(pt):
1045                            reindextypes[pt].append(n)
1046                        else:
1047                            reindextypes[pt]= [n]
1048                        break
1049        #cached_data = {}
1050        #if set(name).intersection(set(('faculty','department','end_level','mode'))):
1051        #    cached_data = self.fill_certificates_dict()
1052        students = self.portal_catalog(portal_type="Student")
1053        if hasattr(self,'portal_catalog_real'):
1054            aq_portal = self.portal_catalog_real.evalAdvancedQuery
1055        else:
1056            aq_portal = self.portal_catalog.evalAdvancedQuery
1057        num_objects = len(students)
1058        if pghandler:
1059            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1060        noattr = set(('StudentClearance','StudentPersonal')) & set(reindextypes.keys())
1061        #import pdb;pdb.set_trace()
1062        for i in xrange(num_objects):
1063            if pghandler: pghandler.report(i)
1064            student_brain = students[i]
1065            student_object = student_brain.getObject()
1066            data = {}
1067            modified = False
1068            sid = data['id'] = student_brain.getId
1069            if reindex_special and 'review_state' in reindex_special:
1070                modified = True
1071                data['review_state'] = student_object.portal_workflow.getInfoFor(student_object,'review_state',None)
1072            sub_objects = False
1073            for pt in reindextypes.keys():
1074                modified = True
1075                try:
1076                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1077                    sub_objects = True
1078                except:
1079                    continue
1080                for field in set(name).intersection(self.affected_types[pt]['fields']):
1081                    if hasattr(self,'get_from_doc_%s' % field):
1082                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc)
1083                    else:
1084                        data[field] = getattr(doc,field)
1085            if not sub_objects and noattr:
1086                import_res = self.returning_import(id = sid)
1087                if not import_res:
1088                    continue
1089                import_record = import_res[0]
1090                data['matric_no'] = import_record.matric_no
1091                data['sex'] = import_record.Sex == 'F'
1092                data['name'] = "%s %s %s" % (import_record.Firstname,
1093                                             import_record.Middlename,
1094                                             import_record.Lastname)
1095                data['jamb_reg_no'] = import_record.Entryregno
1096            if modified:
1097                self.modifyRecord(**data)
1098        if pghandler: pghandler.finish()
1099    ###)
1100
1101    def refreshCatalog(self, clear=0, pghandler=None): ###(
1102        """ re-index everything we can find """
1103        students_folder = self.portal_url.getPortalObject().campus.students
1104        if clear:
1105            self._catalog.clear()
1106        students = self.portal_catalog(portal_type="Student")
1107        num_objects = len(students)
1108        #cached_data = self.fill_certificates_dict()
1109        if not hasattr(self,'_v_certificates'):
1110            self._v_certificates = self.fill_certificates_dict()
1111        if pghandler:
1112            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1113        for i in xrange(num_objects):
1114            if pghandler: pghandler.report(i)
1115            student_brain = students[i]
1116            spath = student_brain.getPath()
1117            student_object = student_brain.getObject()
1118            data = {}
1119            sid = data['id'] = student_brain.getId
1120            #data['review_state'] = student_brain.review_state
1121            data['review_state'] = student_object.portal_workflow.getInfoFor(student_object,'review_state',None)
1122            sub_objects = False
1123            for pt in self.affected_types.keys():
1124                modified = True
1125                try:
1126                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1127                    sub_objects = True
1128                except:
1129                    #from pdb import set_trace;set_trace()
1130                    continue
1131                for field in self.affected_types[pt]['fields']:
1132                    if hasattr(self,'get_from_doc_%s' % field):
1133                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc)
1135                    else:
1136                        data[field] = getattr(doc,field,None)
1137            if not sub_objects:
1138                import_res = self.returning_import(id = sid)
1139                if not import_res:
1140                    continue
1141                import_record = import_res[0]
1142                data['matric_no'] = import_record.matric_no
1143                data['sex'] = import_record.Sex == 'F'
1144                data['name'] = "%s %s %s" % (import_record.Firstname,
1145                                             import_record.Middlename,
1146                                             import_record.Lastname)
1147                data['jamb_reg_no'] = import_record.Entryregno
1148            self.addRecord(**data)
1149        if pghandler: pghandler.finish()
1150    ###)
1151
1152    security.declarePrivate('notify_event_listener') ###(
1153    def notify_event_listener(self,event_type,object,infos):
1154        "listen for events"
1155        if not infos.has_key('rpath'):
1156            return
1157        pt = getattr(object,'portal_type',None)
1158        mt = getattr(object,'meta_type',None)
1159        students_catalog = self
1160        data = {}
1161        if pt == 'Student' and\
1162           mt == 'CPS Proxy Folder' and\
1163           event_type.startswith('workflow'):
1164            data['id'] = object.getId()
1165            data['review_state'] = self.portal_workflow.getInfoFor(object,'review_state',None)
1166            students_catalog.modifyRecord(**data)
1167            return
1168        rpl = infos['rpath'].split('/')
1169        if pt == 'Student' and mt == 'CPS Proxy Folder':
1170            student_id = object.id
1171            if event_type == "sys_add_object":
1172                try:
1173                    self.addRecord(id = student_id)
1174                except ValueError:
1175                    pass
1176                return
1177            elif event_type == 'sys_del_object':
1178                self.deleteRecord(student_id)
1179        if pt not in self.affected_types.keys():
1180            return
1181        if event_type not in ('sys_modify_object',):
1182            return
1183        if mt == 'CPS Proxy Folder':
1184            return
1185        if not hasattr(self,'_v_certificates'):
1186            self._v_certificates = self.fill_certificates_dict()
1187        for field in self.affected_types[pt]['fields']:
1188            if hasattr(self,'get_from_doc_%s' % field):
1189                data[field] = getattr(self,'get_from_doc_%s' % field)(object)
1190            else:
1191                data[field] = getattr(object,field)
1192        data['id'] = rpl[2]
1193        self.modifyRecord(**data)
1194    ###)
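    # Event flow of notify_event_listener: workflow events on a Student proxy
    # only update 'review_state'; sys_add_object / sys_del_object on a Student
    # proxy add or delete the catalog record; sys_modify_object on one of the
    # documents listed in affected_types re-reads the listed fields and
    # modifies the record whose id is taken from the object's rpath.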
1195
1196
1197InitializeClass(StudentsCatalog)
1198
1199###)
1200
1201class CertificatesCatalog(WAeUPTable): ###(
1202    security = ClassSecurityInfo()
1203
1204    meta_type = 'WAeUP Certificates Catalog'
1205    name =  "certificates_catalog"
1206    key = "code"
1207    def __init__(self,name=None):
1208        if name ==  None:
1209            name =  self.name
1210        WAeUPTable.__init__(self, name)
1211
1212    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
1213        """ clear the catalog, then re-index everything """
1214
1215        elapse = time.time()
1216        c_elapse = time.clock()
1217
1218        pgthreshold = self._getProgressThreshold()
1219        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
1220        self.refreshCatalog(clear=1, pghandler=handler)
1221
1222        elapse = time.time() - elapse
1223        c_elapse = time.clock() - c_elapse
1224
1225        RESPONSE.redirect(
1226            URL1 +
1227            '/manage_catalogAdvanced?manage_tabs_message=' +
1228            urllib.quote('Catalog Updated \n'
1229                         'Total time: %s\n'
1230                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
1231    ###)
1232
1233    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1234        if isinstance(name, str):
1235            name = (name,)
1236        certificates = self.portal_catalog(portal_type="Certificate")
1237        num_objects = len(certificates)
1238        if pghandler:
1239            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1240        for i in xrange(num_objects):
1241            if pghandler: pghandler.report(i)
1242            certificate_brain = certificates[i]
1243            certificate_object = certificate_brain.getObject()
1244            pl = certificate_brain.getPath().split('/')
1245            data = {}
1246            cid = data[self.key] = certificate_brain.getId
1247            data['faculty'] = pl[-4]
1248            data['department'] = pl[-3]
1249            doc = certificate_object.getContent()
1250            for field in name:
1251                if field not in (self.key,'faculty','department'):
1252                    data[field] = getattr(doc,field)
1253            self.modifyRecord(**data)
1254        if pghandler: pghandler.finish()
1255    ###)
1256
1257    def refreshCatalog(self, clear=0, pghandler=None): ###(
1258        """ re-index everything we can find """
1259        if clear:
1260            self._catalog.clear()
1261        certificates = self.portal_catalog(portal_type="Certificate")
1262        num_objects = len(certificates)
1263        if pghandler:
1264            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1265        #from pdb import set_trace;set_trace()
1266        for i in xrange(num_objects):
1267            if pghandler: pghandler.report(i)
1268            certificate_brain = certificates[i]
1269            certificate_doc = certificate_brain.getObject().getContent()
1270            pl = certificate_brain.getPath().split('/')
1271            data = {}
1272            for field in self.schema():
1273                data[field] = getattr(certificate_doc,field,None)
1274            data[self.key] = certificate_brain.getId
1275            ai = pl.index('academics')
1276            data['faculty'] = pl[ai +1]
1277            data['department'] = pl[ai +2]
1278            if clear:
1279                self.addRecord(**data)
1280            else:
1281                self.modifyRecord(**data)
1282        if pghandler: pghandler.finish()
1283    ###)
1284
1285    security.declarePrivate('notify_event_listener') ###(
1286    def notify_event_listener(self,event_type,object,infos):
1287        "listen for events"
1288        if not infos.has_key('rpath'):
1289            return
1290        pt = getattr(object,'portal_type',None)
1291        mt = getattr(object,'meta_type',None)
1292        if pt != 'Certificate':
1293            return
1294        data = {}
1295        rpl = infos['rpath'].split('/')
1296        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
1297            return
1298        certificate_id = object.getId()
1299        data[self.key] = certificate_id
1300        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
1301            try:
1302                self.addRecord(**data)
1303            except ValueError:
1304                return
1305            certificate_id = object.getId()
1306            doc = object.getContent()
1307            if doc is None:
1308                return
1309            for field in self.schema():
1310                data[field] = getattr(doc,field,None)
1311            data[self.key] = certificate_id
1312            ai = rpl.index('academics')
1313            data['faculty'] = rpl[ai +1]
1314            data['department'] = rpl[ai +2]
1315            self.modifyRecord(**data)
1316            return
1317        if event_type == "sys_del_object":
1318            self.deleteRecord(certificate_id)
1319            return
1320        if event_type == "sys_modify_object" and mt == 'Certificate':
1321            #from pdb import set_trace;set_trace()
1322            for field in self.schema():
1323                data[field] = getattr(object,field,None)
1324            certificate_id = object.aq_parent.getId()
1325            data[self.key] = certificate_id
1326            ai = rpl.index('academics')
1327            data['faculty'] = rpl[ai +1]
1328            data['department'] = rpl[ai +2]
1329            self.modifyRecord(**data)
1330    ###)
1331
1332
1333InitializeClass(CertificatesCatalog)
1334###)
1335
1336class CoursesCatalog(WAeUPTable): ###(
1337    security = ClassSecurityInfo()
1338
1339    meta_type = 'WAeUP Courses Catalog'
1340    name =  "courses_catalog"
1341    key = "code"
1342    def __init__(self,name=None):
1343        if name ==  None:
1344            name =  self.name
1345        WAeUPTable.__init__(self, name)
1346
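    # manage_catalogReindex is the ZMI entry point: it clears the table, rebuilds it
    # via refreshCatalog and reports elapsed wall-clock and CPU time in the redirect
    # message.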
1347    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
1348        """ clear the catalog, then re-index everything """
1349
1350        elapse = time.time()
1351        c_elapse = time.clock()
1352
1353        pgthreshold = self._getProgressThreshold()
1354        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
1355        self.refreshCatalog(clear=1, pghandler=handler)
1356
1357        elapse = time.time() - elapse
1358        c_elapse = time.clock() - c_elapse
1359
1360        RESPONSE.redirect(
1361            URL1 +
1362            '/manage_catalogAdvanced?manage_tabs_message=' +
1363            urllib.quote('Catalog Updated \n'
1364                         'Total time: %s\n'
1365                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
1366    ###)
1367
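    # reindexIndex refreshes the named field(s) of every Course record from the
    # document; the record key and the faculty/department values are re-derived from
    # fixed positions in the catalog path.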
1368    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1369        if isinstance(name, str):
1370            name = (name,)
1371        courses = self.portal_catalog(portal_type="Course")
1372        num_objects = len(courses)
1373        if pghandler:
1374            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1375        for i in xrange(num_objects):
1376            if pghandler: pghandler.report(i)
1377            course_brain = courses[i]
1378            course_object = course_brain.getObject()
1379            pl = course_brain.getPath().split('/')
1380            data = {}
1381            cid = data[self.key] = course_brain.getId
1382            data['faculty'] = pl[-4]
1383            data['department'] = pl[-3]
1384            doc = course_object.getContent()
1385            for field in name:
1386                if field not in (self.key,'faculty','department'):
1387                    data[field] = getattr(doc,field)
1388            self.modifyRecord(**data)
1389        if pghandler: pghandler.finish()
1390    ###)
1391
1392    def refreshCatalog(self, clear=0, pghandler=None): ###(
1393        """ re-index everything we can find """
1394        if clear:
1395            self._catalog.clear()
1396        courses = self.portal_catalog(portal_type="Course")
1397        num_objects = len(courses)
1398        if pghandler:
1399            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1400        #from pdb import set_trace;set_trace()
1401        for i in xrange(num_objects):
1402            if pghandler: pghandler.report(i)
1403            course_brain = courses[i]
1404            course_doc = course_brain.getObject().getContent()
1405            pl = course_brain.getPath().split('/')
1406            data = {}
1407            for field in self.schema():
1408                data[field] = getattr(course_doc,field,None)
1409            data[self.key] = course_brain.getId
1410            ai = pl.index('academics')
1411            data['faculty'] = pl[ai +1]
1412            data['department'] = pl[ai +2]
1413            if clear:
1414                self.addRecord(**data)
1415            else:
1416                self.modifyRecord(**data)
1417        if pghandler: pghandler.finish()
1418    ###)
1419
1420    security.declarePrivate('notify_event_listener') ###(
1421    def notify_event_listener(self,event_type,object,infos):
1422        "listen for events"
1423        if not infos.has_key('rpath'):
1424            return
1425        pt = getattr(object,'portal_type',None)
1426        mt = getattr(object,'meta_type',None)
1427        if pt != 'Course':
1428            return
1429        data = {}
1430        rpl = infos['rpath'].split('/')
1431        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
1432            return
1433        course_id = object.getId()
1434        data[self.key] = course_id
1435        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
1436            try:
1437                self.addRecord(**data)
1438            except ValueError:
1439                return
1440            course_id = object.getId()
1441            doc = object.getContent()
1442            if doc is None:
1443                return
1444            for field in self.schema():
1445                data[field] = getattr(doc,field,None)
1446            data[self.key] = course_id
1447            ai = rpl.index('academics')
1448            data['faculty'] = rpl[ai +1]
1449            data['department'] = rpl[ai +2]
1450            self.modifyRecord(**data)
1451            return
1452        if event_type == "sys_del_object":
1453            self.deleteRecord(course_id)
1454            return
1455        if event_type == "sys_modify_object" and mt == 'Course':
1456            #from pdb import set_trace;set_trace()
1457            for field in self.schema():
1458                data[field] = getattr(object,field,None)
1459            course_id = object.aq_parent.getId()
1460            data[self.key] = course_id
1461            ai = rpl.index('academics')
1462            data['faculty'] = rpl[ai +1]
1463            data['department'] = rpl[ai +2]
1464            self.modifyRecord(**data)
1465    ###)
1466
1467
1468InitializeClass(CoursesCatalog)
1469###)
1470
1471class CourseResults(WAeUPTable): ###(
1472    security = ClassSecurityInfo()
1473
1474    meta_type = 'WAeUP Results Catalog'
1475    name = "course_results"
1476    key = "key" #student_id + level + course_id
1477    def __init__(self,name=None):
1478        if name ==  None:
1479            name = self.name
1480        WAeUPTable.__init__(self, name)
1481        self._queue = []
1482
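    # Records in this table are keyed by the composite string
    # "<student_id>|<level_id>|<course_id>", e.g. a hypothetical row
    # {'student_id': 'X123456', 'level_id': '200', 'course_id': 'MTH101', ...}
    # yields the key 'X123456|200|MTH101'. addMultipleRecords builds that key for each
    # row, returns the uids of rows that are already catalogued unchanged and
    # catalogues the rest.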
1483    def addMultipleRecords(self, records): ###(
1484        """add many records"""
1485        existing_uids = []
1486        for data in records:
1487            uid = "%(student_id)s|%(level_id)s|%(course_id)s" % data
1488            data['%s' % self.key] = uid
1489            query = Eq(self.key, uid)
1490            res = self.course_results.evalAdvancedQuery(query)
1491            if len(res) > 0:
1492                rec = res[0]
1493                equal = True
1494                for attr in ('student_id','level_id','course_id'):
1495                    if getattr(rec,attr,'') != data[attr]:
1496                        equal = False
1497                        break
1498                if equal:
1499                    existing_uids.append(uid)
1500                    continue
1501            self.catalog_object(dict2ob(data), uid=uid)
1502        return existing_uids
1503    ###)
1504
1505    def deleteResultsHere(self,level_id,student_id): ###(
1506        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1507        course_results = self.course_results.evalAdvancedQuery(query)
1508        #import pdb;pdb.set_trace()
1509        for result in course_results:
1510            self.deleteRecord(result.key)
1511    ###)
1512
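    # moveResultsHere flattens the course objects of a level folder into this table:
    # ids ending in '_co' are recorded as carry-overs, courses already catalogued for
    # that student and level are skipped, and the level's course objects are deleted
    # afterwards.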
1513    def moveResultsHere(self,level,student_id): ###(
1514        #import pdb;pdb.set_trace()
1515        level_id = level.getId()
1516        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1517        course_results = self.course_results.evalAdvancedQuery(query)
1518        existing_courses = [cr.code for cr in course_results]
1519        to_delete = []
1520        for code,obj in level.objectItems():
1521            to_delete.append(code)
1522            carry_over = False
1523            if code.endswith('_co'):
1524                carry_over = True
1525                code  = code[:-3]
1526            if code in existing_courses:
1527                continue
1528            course_result_doc = obj.getContent()
1529            data = {}
1530            course_id = code
1531            for field in self.schema():
1532                data[field] = getattr(course_result_doc,field,'')
1533            data['key'] = key = "%(student_id)s|%(level_id)s|%(course_id)s" % vars()
1534            data['student_id'] = student_id
1535            data['level_id'] = level_id
1536            session_id = self.getLevelSession(level.getContent(),student_id,level_id)
1537            data['session_id'] = session_id
1538            #data['queue_status'] = OBJECT_CREATED
1539            data['code'] = course_id
1540            data['carry_over'] = carry_over
1541            self.catalog_object(dict2ob(data), uid=key)
1542        level.manage_delObjects(to_delete)
1543    ###)
1544
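    # getCourses returns (total_credits, gpa, carry_overs, normal1, normal2, normal3),
    # the last four being result lists grouped by carry-over flag and semester. Note
    # that 'gpa' is returned as the raw sum of weight*credits; dividing by
    # total_credits is presumably left to the caller.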
1545    def getCourses(self,student_id,level_id): ###(
1546        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1547        course_results = self.course_results.evalAdvancedQuery(query)
1548        carry_overs = []
1549        normal1 = []
1550        normal2 = []
1551        normal3 = []
1552        total_credits = 0
1553        gpa = 0
1554        for brain in course_results:
1555            d = {}
1556
1557            for field in self.schema():
1558                d[field] = getattr(brain,field,None)
1559                if repr(d[field]) == 'Missing.Value':
1560                    d[field] = ''
1561            d['weight'] = ''
1562            d['grade'] = ''
1563            d['score'] = ''
1564
1565            if str(brain.credits).isdigit():
1566                credits = int(brain.credits)
1567                total_credits += credits
1568                score = getattr(brain,'score',0)
1569                if score and str(score).isdigit() and int(score) > 0:
1570                    score = int(score)
1571                    grade,weight = self.getGradesFromScore(score,'')
1572                    gpa += weight * credits
1573                    d['weight'] = weight
1574                    d['grade'] = grade
1575                    d['score'] = score
1576
1577            #if str(brain.ca1).isdigit() and str(brain.ca2).isdigit() and str(brain.exam).isdigit():
1578            #    d['score_calc'] = int(brain.ca1) + int(brain.ca2) + int(brain.exam)
1579            #else:
1580            #    d['score_calc'] = ''
1581            try:
1582                d['score_calc'] = float(brain.ca1) + float(brain.ca2) + float(brain.exam)
1583            except:
1584                d['score_calc'] = ''
1585
1586            if d['score_calc']:
1587                grade,weight = self.getGradesFromScore(d['score_calc'],level_id)
1588                d['grade'] = grade
1589
1590            d['coe'] = ''
1591            if brain.core_or_elective:
1592                d['coe'] = 'Core'
1593            elif brain.core_or_elective == False:
1594                d['coe'] = 'Elective'
1595            code = d['id'] = brain.code
1596            d['code'] = code
1597            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1598            if res:
1599                course = res[0]
1600                d['title'] = course.title
1601                # The courses_catalog contains strings and integers in its semester field.
1602                # Maybe this can be fixed by reindexing the catalog. The schema of course says 'CPS Int Field'.
1603                d['semester'] = str(course.semester)
1604            else:
1605                d['title'] = "Course has been removed from course list"
1606                d['semester'] = ''
1607            if brain.carry_over:
1608                d['coe'] = 'CO'
1609                carry_overs.append(d)
1610            else:
1611                if d['semester'] == '1':
1612                    normal1.append(d)
1613
1614                elif d['semester'] == '2':
1615                    normal2.append(d)
1616                else:
1617                    normal3.append(d)
1618        #normal.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
1619        #                                "%(semester)s%(code)s" % y))
1620        carry_overs.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
1621                                             "%(semester)s%(code)s" % y))
1622        return total_credits,gpa,carry_overs,normal1,normal2,normal3
1623    ###)
1624
1625   
1626    # for transcript only
1627    def getAllCourses(self,student_id): ###(
1628        query = Eq('student_id',student_id)
1629        course_results = self.course_results.evalAdvancedQuery(query)
1630        courses = []
1631        for brain in course_results:
1632            d = {}
1633
1634            for field in self.schema():
1635                d[field] = getattr(brain,field,'')
1636
1637            d['weight'] = ''
1638            d['grade'] = ''
1639            d['score'] = ''
1640
1641            if str(brain.credits).isdigit():
1642                credits = int(brain.credits)
1643                score = getattr(brain,'score',0)
1644                if score and str(score).isdigit() and int(score) > 0:
1645                    score = int(score)
1646                    grade,weight = self.getGradesFromScore(score,'')
1647                    d['weight'] = weight
1648                    d['grade'] = grade
1649                    d['score'] = score
1650            d['coe'] = ''
1651            if brain.core_or_elective:
1652                d['coe'] = 'Core'
1653            elif brain.core_or_elective == False:
1654                d['coe'] = 'Elective'
1655            code = d['id'] = brain.code
1656            d['code'] = code
1657            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1658            if res:
1659                course = res[0]
1660                d['title'] = course.title
1661                # The courses_catalog contains strings and integers in its semester field.
1662                # Maybe this can be fixed by reindexing the catalog. The schema of course says 'CPS Int Field'.
1663                d['semester'] = str(course.semester)
1664            else:
1665                d['title'] = "Course has been removed from course list"
1666                d['semester'] = ''
1667            if brain.carry_over:
1668                d['coe'] = 'CO'
1669            courses.append(d)
1670        return courses
1671    ###)
1672   
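    # getYearGroupAverage averages the overall mark (ca1 + ca2 + exam) per semester
    # for one session and level; it returns the formatted averages, the counters and
    # the raw mark lists for semesters 1, 2 and 3.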
1673    def getYearGroupAverage(self,session_id,level_id): ###(
1674        query = Eq('session_id',session_id) & Eq('level_id',level_id)
1675        course_results = self.course_results.evalAdvancedQuery(query)
1676        yga1 = 0
1677        yg1 = []
1678        counter1 = 0
1679        yga2 = 0
1680        yg2 = []
1681        counter2 = 0
1682        yga3 = 0
1683        yg3 = []
1684        counter3 = 0       
1685        #import pdb;pdb.set_trace()
1686        for brain in course_results:
1687            try:
1688                om = float(brain.ca1) + float(brain.ca2) + float(brain.exam)
1689                if not om > 0:
1690                    continue
1691                code = brain.code               
1692                res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1693                if res:
1694                    course = res[0]
1695                    # The courses_catalog contains strings and integers in its semester field.
1696                    # Maybe this can be fixed by reindexing the catalog. The schema of course says 'CPS Int Field'.
1697                    semester = str(course.semester)
1698                else:
1699                    semester = ''
1700                if semester == '1':
1701                    counter1 += 1
1702                    yga1 += om
1703                    yg1.append(om)
1704                elif semester == '2':
1705                    counter2 += 1
1706                    yga2 += om     
1707                    yg2.append(om)   
1708                elif semester == '3':
1709                    counter3 += 1
1710                    yga3 += om
1711                    yg3.append(om)
1712            except:
1713                continue               
1714        if counter1:
1715            yga1 /= counter1
1716            yga1 = '%.2f' % yga1   
1717        if counter2:
1718            yga2 /= counter2
1719            yga2 = '%.2f' % yga2   
1720        if counter3:
1721            yga3 /= counter3
1722            yga3 = '%.2f' % yga3                                   
1723        return yga1, yga2, yga3, counter1, counter2, counter3, yg1, yg2, yg3
1724    ###)
1725   
1726   
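    # calculateCoursePosition ranks a score against all overall marks (ca1+ca2+exam)
    # recorded for the same course, level and session, optionally restricted to one
    # semester. With the hypothetical marks [78, 65, 54] and a score of 65 the
    # returned position would be '2 of 3'.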
1727    #security.declarePublic("calculateCoursePosition")
1728    def calculateCoursePosition(self,session_id,level_id,code,score,semester=None):
1729        #"""calculate Course Position"""
1730        query = Eq('session_id',session_id) & Eq('level_id',level_id) & Eq('code',code)
1731        course_results = self.course_results.evalAdvancedQuery(query)
1732        ygc = []
1733        #import pdb;pdb.set_trace() 
1734        for brain in course_results:
1735            try:
1736                if not float(brain.ca1) + float(brain.ca2) + float(brain.exam) > 0:
1737                    continue
1738                #code = brain.code   
1739                if semester:
1740                    res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1741                    if res:
1742                        course = res[0]
1743                        # The courses_catalog contains strings and integers in its semester field.
1744                        # Maybe this can be fixed by reindexing the catalog. The schema of course says 'CPS Int Field'.
1745                        semester_from_course = str(course.semester)
1746                    else:
1747                        continue
1748                    if semester != semester_from_course:
1749                        continue
1750                ygc.append(float(brain.ca1) + float(brain.ca2) + float(brain.exam))
1751            except:
1752                continue     
1753        ygc.sort(reverse=True)
1754        if not len(ygc):
1755            return 'no result'
1756        #import pdb;pdb.set_trace()       
1757        for pos in range(len(ygc)):
1758            if ygc[pos] <= float(score):
1759                break
1760        output = {}   
1761        output['pos'] =  '%d of %d' % (pos+1,len(ygc))
1762        output['ygc'] = ygc
1763        return output
1764       
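    # calculateAllCoursePositions walks every course result of the given session,
    # recomputes its position in class via calculateCoursePosition and stores the
    # result in the record's 'pic' field.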
1765    security.declareProtected(ModifyPortalContent,"calculateAllCoursePositions")
1766    def calculateAllCoursePositions(self,session_id=None):
1767        """calculate All Course Positions"""
1768        logger = logging.getLogger('WAeUPTables.CourseResults.calculateAllCoursePositions')
1769        member = self.portal_membership.getAuthenticatedMember()
1770        logger.info('%s starts recalculation of positions in session %s' % (member,session_id))
1771        if session_id:
1772            query = Eq('session_id',session_id)
1773        else:
1774            return 'no session_id provided'
1775        course_results = self.course_results.evalAdvancedQuery(query)
1776        for brain in course_results:
1777            try:
1778                if not float(brain.ca1) + float(brain.ca2) + float(brain.exam) > 0:
1779                    continue
1780                res = self.courses_catalog.evalAdvancedQuery(Eq('code',brain.code))
1781                if res:
1782                    course = res[0]
1783                    semester_from_course = str(course.semester)
1784                else:
1785                    continue                   
1786                score = float(brain.ca1) + float(brain.ca2) + float(brain.exam)
1787                pic = self.calculateCoursePosition(session_id,brain.level_id,brain.code,score,semester_from_course)['pos']
1788                data = {}
1789                data[self.key] = brain.key
1790                data['pic'] = pic
1791                self.modifyRecord(**data)
1792            except:
1793                continue       
1794        logger.info('recalculation finished')             
1795        return 'ready'   
1796   
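    # exportRemoveAllCourses collects all course results of a student as quoted CSV
    # lines over the schema fields; with export=True they are appended to
    # <instance_home>/export/course_results_removed.csv, with remove=True the records
    # are deleted from the catalog. The CSV lines are returned in any case.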
1797    def exportRemoveAllCourses(self,student_id,export=False,remove=False): ###(
1798        """return a student's course results as CSV lines; optionally append them to an export file and/or delete them"""
1799        query = Eq('student_id',student_id)
1800        cr_catalog = self.course_results
1801        course_results = cr_catalog.evalAdvancedQuery(query)
1802        courses = []
1803        fields = self.schema()
1804        format = '"%(' + ')s","%('.join(fields) + ')s"'
1805        for brain in course_results:
1806            d = {}
1807            for field in fields:
1808                d[field] = getattr(brain,field,'')
1809            courses.append(format % d)
1810               
1811        if export:
1812            export_file = "%s/export/course_results_removed.csv" % (i_home)
1813            if not os.path.exists(export_file): 
1814                file_handler = open(export_file,"a")
1815                headline = ','.join(fields)
1816                file_handler.write(headline +'\n')
1817            else:
1818                file_handler = open(export_file,"a")
1819            for line in courses:
1820                file_handler.write(line +'\n')
            file_handler.close()
1821
1822        if remove:
1823            for brain in course_results:
1824                key = getattr(brain,'key','')
1825                cr_catalog.deleteRecord(key)
1826       
1827        return courses
1828    ###)   
1829   
1830   
1831
1832InitializeClass(CourseResults)
1833###)
1834
1835class OnlinePaymentsImport(WAeUPTable): ###(
1836
1837    meta_type = 'WAeUP Online Payment Transactions'
1838    name = "online_payments_import"
1839    key = "order_id"
1840    def __init__(self,name=None):
1841        if name ==  None:
1842            name = self.name
1843        WAeUPTable.__init__(self, name)
1844
1845
1846InitializeClass(OnlinePaymentsImport)
1847###)
1848
1849class ReturningImport(WAeUPTable): ###(
1850
1851    meta_type = 'Returning Import Table'
1852    name = "returning_import"
1853    key = "matric_no"
1854    def __init__(self,name=None):
1855        if name ==  None:
1856            name = self.name
1857        WAeUPTable.__init__(self, name)
1858
1859
1860InitializeClass(ReturningImport)
1861###)
1862
1863class ResultsImport(WAeUPTable): ###(
1864
1865    meta_type = 'Results Import Table'
1866    name = "results_import"
1867    key = "key"
1868    def __init__(self,name=None):
1869        if name ==  None:
1870            name = self.name
1871        WAeUPTable.__init__(self, name)
1872
1873
1874InitializeClass(ResultsImport)
1875
1876###)
1877
1878class PaymentsCatalog(WAeUPTable): ###(
1879    security = ClassSecurityInfo()
1880
1881    meta_type = 'WAeUP Payments Catalog'
1882    name = "payments_catalog"
1883    key = "order_id"
1884    def __init__(self,name=None):
1885        if name ==  None:
1886            name = self.name
1887        WAeUPTable.__init__(self, name)
1888
1889
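    # The payments catalog is keyed by order_id. Its event listener below takes the
    # student_id from a fixed position in the object's path, tries to modify an
    # existing record first and only adds a new one when that fails.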
1890    security.declarePrivate('notify_event_listener') ###(
1891    def notify_event_listener(self,event_type,object,infos):
1892        "listen for events"
1893        if not infos.has_key('rpath'):
1894            return
1895        pt = getattr(object,'portal_type',None)
1896        mt = getattr(object,'meta_type',None)
1897        data = {}
1898        if pt != 'Payment':
1899            return
1900        if event_type == 'sys_del_object' and mt == 'CPS Proxy Folder':
1901            self.deleteRecord(object.getContent().order_id)
1902        if mt == 'CPS Proxy Folder':
1903            return # is handled only for the real object
1904        if event_type not in ('sys_modify_object',):
1905            return
1906        for field in self.schema():
1907            data[field] = getattr(object,field,'')
1908        rpl = infos['rpath'].split('/')
1909        #import pdb;pdb.set_trace()
1910        student_id = rpl[-4]
1911        data['student_id'] = student_id
1912        modified = False
1913        try:
1914            self.modifyRecord(**data)
1915            modified = True
1916        except KeyError:
1917            #logger = logging.getLogger('WAeUPTables.PaymentsCatalog.%s' % self.__name__)
1918            #logger.info("could not modify entry for %(student_id)s with %(order_id)s" % data)
1919            pass
1920        if not modified:
1921            try:
1922                self.addRecord(**data)
1923            except:
1924                logger = logging.getLogger('WAeUPTables.PaymentsCatalog.notify_event_listener')
1925                logger.info("could not add or modify entry for %(student_id)s with %(order_id)s" % data)
1926        ###)
1927
1928
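    # exportRemoveAllPayments mirrors CourseResults.exportRemoveAllCourses: it dumps a
    # student's payment records to <instance_home>/export/payments_removed.csv and/or
    # deletes them from the catalog, keyed by order_id.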
1929    def exportRemoveAllPayments(self,student_id,export=False,remove=False): ###(
1930        """return a student's payment records as CSV lines; optionally append them to an export file and/or delete them"""
1931        query = Eq('student_id',student_id)
1932        pm_catalog = self.payments_catalog
1933        payments = pm_catalog.evalAdvancedQuery(query)
1934        payments_dic = []
1935        fields = self.schema()
1936        format = '"%(' + ')s","%('.join(fields) + ')s"'
1937        for brain in payments:
1938            d = {}
1939            for field in fields:
1940                d[field] = getattr(brain,field,'')
1941            payments_dic.append(format % d)
1942               
1943        if export:
1944            export_file = "%s/export/payments_removed.csv" % (i_home)
1945            if not os.path.exists(export_file): 
1946                file_handler = open(export_file,"a")
1947                headline = ','.join(fields)
1948                file_handler.write(headline +'\n')
1949            else:
1950                file_handler = open(export_file,"a")
1951            for line in payments_dic:
1952                file_handler.write(line +'\n')
            file_handler.close()
1953
1954        if remove:
1955            for brain in payments:
1956                order_id = getattr(brain,'order_id','')
1957                pm_catalog.deleteRecord(order_id)
1958       
1959        return payments_dic
1960    ###)   
1961
1962InitializeClass(PaymentsCatalog)
1963
1964###)
1965
1966class RemovedStudentIds(WAeUPTable): ###(
1967
1968    meta_type = 'WAeUP Removed StudentIds'
1969    name = "removed_student_ids"
1970    key = "id"
1971    def __init__(self,name=None):
1972        if name ==  None:
1973            name = self.name
1974        WAeUPTable.__init__(self, name)
1975
1976
1977InitializeClass(RemovedStudentIds)
1978
1979###)
1980
1981# BBB: keep the old misspelled name as a backward-compatible alias
1982AccomodationTable = AccommodationTable