source: WAeUP_SRP/trunk/WAeUPTables.py @ 4517

Last change on this file since 4517 was 4306, checked in by Henrik Bettermann, 15 years ago

no scans required

  • Property svn:keywords set to Id
File size: 74.1 KB
1#-*- mode: python; mode: fold -*-
2# (C) Copyright 2005 AixtraWare <http://aixtraware.de>
3# Author: Joachim Schmitz <js@aixtraware.de>
4#
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License version 2 as published
7# by the Free Software Foundation.
8#
9# This program is distributed in the hope that it will be useful,
10# but WITHOUT ANY WARRANTY; without even the implied warranty of
11# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12# GNU General Public License for more details.
13#
14# You should have received a copy of the GNU General Public License
15# along with this program; if not, write to the Free Software
16# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
17# 02111-1307, USA.
18#
19# $Id: WAeUPTables.py 4306 2009-06-17 19:13:36Z henrik $
20
21from zope.interface import implements
22from Globals import InitializeClass
23from Products.ZCatalog.ZCatalog import ZCatalog
24from Products.ZCatalog.ProgressHandler import ZLogHandler
25from AccessControl import ClassSecurityInfo
26from Products.CMFCore.permissions import ModifyPortalContent
27from Products.CMFCore.utils import getToolByName
28from Products.CMFCore.CatalogTool import CatalogTool
29from Products.CPSSchemas.StorageAdapter import MappingStorageAdapter
30from Products.CPSSchemas.DataStructure import DataStructure
31from Products.CPSSchemas.DataModel import DataModel
32from Products.AdvancedQuery import Eq, Between, Le,In
33import urllib
34import DateTime,time
35import csv,re,os
36import logging
37import Globals
38p_home = Globals.package_home(globals())
39i_home = Globals.INSTANCE_HOME
40
41ADDING_SHEDULED = "adding_sheduled"
42OBJECT_CREATED = "object_created"
43NOT_OCCUPIED = 'not_occupied'
44
45from interfaces import IWAeUPTable
46
47class AttributeHolder(object):
48    pass
49
50def dict2ob(d):
51    ob = AttributeHolder()
52    for key, value in d.items():
53        setattr(ob, key, value)
54    return ob
55
56class WAeUPTable(ZCatalog): ###(
57
58    implements(IWAeUPTable)
59    security = ClassSecurityInfo()
60    meta_type = None
61
62    def __init__(self,name=None):
63        if name is None:
64            name = self.name
65        ZCatalog.__init__(self,name)
66
67    def refreshCatalog(self, clear=0, pghandler=None): ###(
68        """ don't refresh for a normal table """
69
70        if self.REQUEST and self.REQUEST.RESPONSE:
71            self.REQUEST.RESPONSE.redirect(
72              self.REQUEST.get('URL1') +
73              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20refresh%20not%20implemented')
74
75###)
76
77    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None): ###(
78        """ clears the whole enchilada """
79
80        #if REQUEST and RESPONSE:
81        #    RESPONSE.redirect(
82        #      URL1 +
83        #      '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Clearing%20disabled')
84
85        self._catalog.clear()
86        if REQUEST and RESPONSE:
87            RESPONSE.redirect(
88              URL1 +
89              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20cleared')
90
91###)
92
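    # record2dict turns a catalog record into a dict of CSV-ready values:
    # the boolean 'sex' field is rendered as 'F'/'M', empty values become '',
    # and with index == 'translate' the lga/aos codes are resolved against
    # the portal vocabularies.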
93    def record2dict(self,fields,record,index): ###(
94        d = {}
95        for key in fields:
96            v = getattr(record, key, None)
97            v_dump = v
98            if key == 'sex':
99                if v == True:
100                    v_dump = 'F'
101                elif v == False:
102                    v_dump = 'M'
103                d[key] = v_dump
104            elif v:
105                if index == 'translate':
106                    if key == 'lga':
107                        v_dump = self.portal_vocabularies.local_gov_areas.get(v)
108                        if not v_dump:
109                            v_dump = v
110                    elif key == 'aos':
111                        v_dump = self.portal_vocabularies.aos.get(v)
112                d[key] = v_dump
113            else:
114                d[key] = ''
115        return d
116
117###)
118
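    # addRecord catalogs a new record under the value of the table's key field
    # and refuses to overwrite an existing one. Illustrative call (field values
    # are examples only):
    #   portal_accommodation.addRecord(bed='block-A-101', student=NOT_OCCUPIED)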
119    def addRecord(self, **data): ###(
120        # The uid is the value of the table's key field (e.g. "bed" for the accommodation table).
121        uid = data[self.key]
122        res = self.searchResults({"%s" % self.key : uid})
123        if len(res) > 0:
124            raise ValueError("A record with uid %s already exists" % uid)
125        self.catalog_object(dict2ob(data), uid=uid)
126        return uid
127
128###)
129
130    def deleteRecord(self, uid):
131        self.uncatalog_object(uid)
132
133    def getRecordByKey(self,key):
134        if not key:
135            return None
136        res = self.evalAdvancedQuery(Eq(self.key,key))
137        if res:
138            return res[0]
139        return None
140
141    def searchAndSetRecord(self, **data):
142        raise NotImplementedError
143
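    # modifyRecord re-catalogs an existing record: the current values of all
    # schema and index fields are read from the catalog, updated with **data
    # and written back under the same uid.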
144    def modifyRecord(self, record=None, **data): ###(
145        #records = self.searchResults(uid=uid)
146        uid = data[self.key]
147        if record is None:
148            records = self.searchResults({"%s" % self.key : uid})
149            if len(records) > 1:
150                # Can not happen, but anyway...
151                raise ValueError("More than one record with uid %s" % uid)
152            if len(records) == 0:
153                raise KeyError("No record for uid %s" % uid)
154            record = records[0]
155        record_data = {}
156        for field in self.schema() + self.indexes():
157            record_data[field] = getattr(record, field)
158        # Add the updated data:
159        record_data.update(data)
160        self.catalog_object(dict2ob(record_data), uid)
161
162###)
163
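    # reindexIndex rebuilds the given index(es) from the stored metadata of
    # every record without touching the metadata itself (update_metadata=0).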
164    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
165        if isinstance(name, str):
166            name =  (name,)
167        paths = self._catalog.uids.items()
168        i = 0
169        #import pdb;pdb.set_trace()
170        for p,rid in paths:
171            i += 1
172            metadata = self.getMetadataForRID(rid)
173            record_data = {}
174            for field in name:
175                record_data[field] = metadata.get(field)
176            uid = metadata.get(self.key)
177            self.catalog_object(dict2ob(record_data), uid, idxs=name,
178                                update_metadata=0)
179
180###)
181
182    security.declareProtected(ModifyPortalContent,"exportAllRecords") ###(
183    def exportAllRecords(self):
184        "export a WAeUPTable"
185        #import pdb;pdb.set_trace()
186        fields = [field for field in self.schema()]
187        format = ','.join(['"%%(%s)s"' % fn for fn in fields])
188        rows = []
189        rows.append(','.join(['"%s"' % fn for fn in fields]))
190        for uid in self._catalog.uids:
191            records = self.searchResults({"%s" % self.key : uid})
192            if len(records) > 1:
193                # Can not happen, but anyway...
194                raise ValueError("More than one record with uid %s" % uid)
195            if len(records) == 0:
196                raise KeyError("No record for uid %s" % uid)
197            rec = records[0]
198            rows.append(format % rec)
199        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
200        open("%s/import/%s-%s.csv" % (i_home,self.getId(),current),"w+").write('\n'.join(rows))
201
202###)
203
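    # dumpAll writes the whole table (or the subset matching index == value)
    # to <instance>/export/<table>_<timestamp>.csv, flushing the csv writer in
    # chunks of 2000 records and logging throughput and an estimated end time.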
204    security.declareProtected(ModifyPortalContent,"dumpAll")###(
205    def dumpAll(self,index=None,value=None):
206        """dump all data in the table to a csv"""
207        member = self.portal_membership.getAuthenticatedMember()
208        logger = logging.getLogger('WAeUPTables.WAeUPTable.dumpAll')
209        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
210        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
211        res_list = []
212        lines = []
213        if hasattr(self,"export_keys"):
214            fields = self.export_keys
215        else:
216            fields = []
217            for f in self.schema():
218                fields.append(f)
219        headline = ','.join(fields)
220        out = open(export_file,"wb")
221        out.write(headline +'\n')
222        out.close()
223        out = open(export_file,"a")
224        csv_writer = csv.DictWriter(out,fields,)
225        if index is not None and value is not None:
226            records = self.evalAdvancedQuery(Eq(index,value))
227        else:
228            records = self()
229        nr2export = len(records)
230        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
231        chunk = 2000
232        total = 0
233        start = DateTime.DateTime().timeTime()
234        start_chunk = DateTime.DateTime().timeTime()
235        for record in records:
236            not_all = False
237            d = self.record2dict(fields,record,index)
238            lines.append(d)
239            total += 1
240            if total and not total % chunk or total == len(records):
241                csv_writer.writerows(lines)
242                anz = len(lines)
243                logger.info("wrote %(anz)d  total written %(total)d" % vars())
244                end_chunk = DateTime.DateTime().timeTime()
245                duration = end_chunk-start_chunk
246                per_record = duration/anz
247                till_now = end_chunk - start
248                average_per_record = till_now/total
249                estimated_end = DateTime.DateTime(start + average_per_record * nr2export)
250                estimated_end = estimated_end.strftime("%H:%M:%S")
251                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
252                start_chunk = DateTime.DateTime().timeTime()
253                lines = []
254        end = DateTime.DateTime().timeTime()
255        logger.info('total time %6.2f m' % ((end-start)/60))
256        import os
257        filename, extension = os.path.splitext(export_file)
258        from subprocess import call
259        msg = "wrote %(total)d records to %(export_file)s" % vars()
260        #try:
261        #    retcode = call('gzip %s' % (export_file),shell=True)
262        #    if retcode == 0:
263        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
264        #except OSError, e:
265        #    retcode = -99
266        #    logger.info("zip failed with %s" % e)
267        logger.info(msg)
268        args = {'portal_status_message': msg}
269        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
270        url = self.REQUEST.get('URL2')
271        return self.REQUEST.RESPONSE.redirect(url)
272    ###)
273
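    # _import_old reads <instance>/import/<filename>.csv, validates every row
    # with the widget validators of the given layout, writes the rows to
    # <filename>_imported<timestamp>.csv / <filename>_not_imported<timestamp>.csv
    # and returns the valid and invalid DataModel records to the caller.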
274    security.declarePrivate("_import_old") ###(
275    def _import_old(self,filename,schema,layout, mode,logger):
276        "import data from csv"
277        import transaction
278        import random
279        pm = self.portal_membership
280        member = pm.getAuthenticatedMember()
281        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
282        import_fn = "%s/import/%s.csv" % (i_home,filename)
283        imported_fn = "%s/import/%s_imported%s.csv" % (i_home,filename,current)
284        not_imported_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
285        start = True
286        tr_count = 1
287        total_imported = 0
288        total_not_imported = 0
289        total = 0
290        iname =  "%s" % filename
291        not_imported = []
292        imported = []
293        valid_records = []
294        invalid_records = []
295        d = {}
296        d['mode'] = mode
297        d['imported'] = total_imported
298        d['not_imported'] = total_not_imported
299        d['valid_records'] = valid_records
300        d['invalid_records'] = invalid_records
301        d['import_fn'] = import_fn
302        d['imported_fn'] = imported_fn
303        d['not_imported_fn'] = not_imported_fn
304        if schema is None:
305            em = 'No schema specified'
306            logger.error(em)
307            return d
308        if layout is None:
309            em = 'No layout specified'
310            logger.error(em)
311            return d
312        validators = {}
313        for widget in layout.keys():
314            try:
315                validators[widget] = layout[widget].validate
316            except AttributeError:
317                logger.info('%s has no validate attribute' % widget)
318                return d
319        # if mode == 'edit':
320        #     importer = self.importEdit
321        # elif mode == 'add':
322        #     importer = self.importAdd
323        # else:
324        #     importer = None
325        try:
326            items = csv.DictReader(open(import_fn,"rb"),
327                                   dialect="excel",
328                                   skipinitialspace=True)
329        except:
330            em = 'Error reading %s.csv' % filename
331            logger.error(em)
332            return d
333        #import pdb;pdb.set_trace()
334        for item in items:
335            if start:
336                start = False
337                logger.info('%s starts import from %s.csv' % (member,filename))
338                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
339                attrs = csv.reader(open("%s/import/%s.csv" % (i_home,filename),"rb"),
340                                   dialect="excel",
341                                   skipinitialspace=True).next()
342                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
343                diff2schema = set(import_keys).difference(set(schema.keys()))
344                diff2layout = set(import_keys).difference(set(layout.keys()))
345                if diff2layout:
346                    em = "not ignorable key(s) %s found in heading" % diff2layout
347                    logger.info(em)
348                    return d
349                s = ','.join(['"%s"' % fn for fn in import_keys])
350                open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
351                #s = '"id",' + s
352                open(imported_fn,"a").write(s + '\n')
353                format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
354                format_error = format + ',"%(Error)s"'
355                #format = '"%(id)s",'+ format
356                adapters = [MappingStorageAdapter(schema, item)]
357            dm = DataModel(item, adapters,context=self)
358            ds = DataStructure(data=item,datamodel=dm)
359            error_string = ""
360            #import pdb;pdb.set_trace()
361            for k in import_keys:
362                if not validators[k](ds,mode=mode):
363                    error_string += " %s : %s" % (k,ds.getError(k))
364            # if not error_string and importer:
365            #     item.update(dm)
366            #     item['id'],error = importer(item)
367            #     if error:
368            #         error_string += error
369            if error_string:
370                item['Error'] = error_string
371                invalid_records.append(dm)
372                not_imported.append(format_error % item)
373                total_not_imported += 1
374            else:
375                em = format % item
376                valid_records.append(dm)
377                imported.append(em)
378                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
379                tr_count += 1
380                total_imported += 1
381            total += 1
382        if len(imported) > 0:
383            open(imported_fn,"a").write('\n'.join(imported))
384        if len(not_imported) > 0:
385            open(not_imported_fn,"a").write('\n'.join(not_imported))
386        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
387        d['imported'] = total_imported
388        d['not_imported'] = total_not_imported
389        d['valid_records'] = valid_records
390        d['invalid_records'] = invalid_records
391        d['imported_fn'] = imported_fn
392        d['not_imported_fn'] = not_imported_fn
393        #logger.info(em)
394        return d
395    ###)
396
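    # _import_new is the csv.DictReader based variant of _import_old: it works
    # on an already opened reader instead of a filename and only collects the
    # valid and invalid records in memory without writing result files.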
397    security.declarePrivate("_import") ###(
398    def _import_new(self,csv_items,schema, layout, mode,logger):
399        "import data from csv.Dictreader Instance"
400        start = True
401        tr_count = 1
402        total_imported = 0
403        total_not_imported = 0
404        total = 0
406        not_imported = []
407        valid_records = []
408        invalid_records = []
409        duplicate_records = []
410        d = {}
411        d['mode'] = mode
412        d['valid_records'] = valid_records
413        d['invalid_records'] = invalid_records
414        d['duplicate_records'] = duplicate_records
415        # d['import_fn'] = import_fn
416        # d['imported_fn'] = imported_fn
417        # d['not_imported_fn'] = not_imported_fn
418        validators = {}
419        for widget in layout.keys():
420            try:
421                validators[widget] = layout[widget].validate
422            except AttributeError:
423                logger.info('%s has no validate attribute' % widget)
424                return d
425        for item in csv_items:
426            if start:
427                start = False
428                logger.info('start of import')
429                # member and filename are not available here (unlike in _import_old),
430                # so take the import keys from the first row of the DictReader
431                import_keys = [k for k in item.keys() if not (k.startswith('ignore') or k.isupper())]
432                diff2schema = set(import_keys).difference(set(schema.keys()))
433                diff2layout = set(import_keys).difference(set(layout.keys()))
434                if diff2layout:
435                    em = "not ignorable key(s) %s found in heading" % diff2layout
436                    logger.info(em)
437                    return d
438                # s = ','.join(['"%s"' % fn for fn in import_keys])
439                # open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
440                # #s = '"id",' + s
441                # open(imported_fn,"a").write(s + '\n')
442                # format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
443                # format_error = format + ',"%(Error)s"'
444                # #format = '"%(id)s",'+ format
445                adapters = [MappingStorageAdapter(schema, item)]
446            dm = DataModel(item, adapters,context=self)
447            ds = DataStructure(data=item,datamodel=dm)
448            error_string = ""
449            for k in import_keys:
450                if not validators[k](ds,mode=mode):
451                    error_string += " %s : %s" % (k,ds.getError(k))
452            if error_string:
453                item['Error'] = error_string
454                #invalid_records.append(dm)
455                invalid_records.append(item)
456                total_not_imported += 1
457            else:
459                valid_records.append(dm)
460                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
461                tr_count += 1
462                total_imported += 1
463            total += 1
464        # if len(imported) > 0:
465        #     open(imported_fn,"a").write('\n'.join(imported))
466        # if len(not_imported) > 0:
467        #     open(not_imported_fn,"a").write('\n'.join(not_imported))
468        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
469        d['imported'] = total_imported
470        d['not_imported'] = total_not_imported
471        d['valid_records'] = valid_records
472        d['invalid_records'] = invalid_records
473        return d
474    ###)
475
476    security.declarePublic("missingValue")###(
477    def missingValue(self):
478        from Missing import MV
479        return MV
480    ###)
481###)
482
483class AccommodationTable(WAeUPTable): ###(
484
485    meta_type = 'WAeUP Accommodation Tool'
486    name = "portal_accommodation"
487    key = "bed"
488    not_occupied = NOT_OCCUPIED
489    def __init__(self,name=None):
490        if name is None:
491            name = self.name
492        WAeUPTable.__init__(self, name)
493
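    # searchAndReserveBed returns a (status, bed) tuple: 1 with the newly booked
    # bed on success, -1 with the bed the student has already reserved, -2 if no
    # bed of the requested type is free, and -3 if more than one booking exists.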
494    def searchAndReserveBed(self, student_id,bed_type,random_order=False): ###(
495        logger = logging.getLogger('WAeUPTables.AccommodationTable.searchAndReserveBed')
496        records = self.evalAdvancedQuery(Eq('student',student_id))
497        if len(records) == 1:
498            #return -1,"Student with Id %s already booked bed %s." % (student_id,records[0].bed)
499            logger.info('%s found (reserved) bed %s' % (student_id,records[0].bed))
500            return -1,records[0].bed
501        elif len(records) > 1:
502            logger.info('%s found more than one (reserved) bed' % (student_id))
503            return -3,'more than one bed'
504        query = Eq('bed_type',bed_type) & Eq('student',NOT_OCCUPIED)
505        records = self.evalAdvancedQuery(query,sortSpecs=('sort_id','bed'))
506        if len(records) == 0:
507            logger.info('no bed %s available for %s' % (bed_type,student_id))
508            return -2,"no bed"
509        if random_order:
510            import random
511            bed_no = random.randint(0,len(records)-1)
512        else:
513            bed_no = 0
514        rec = records[bed_no]
515        self.modifyRecord(bed=rec.bed,student=student_id)
516        logger.info('%s booked bed %s' % (student_id,rec.bed))
517        return 1,rec.bed
518    ###)
519
520
521InitializeClass(AccommodationTable)
522
523###)
524
525class PinTable(WAeUPTable): ###(
526    from ZODB.POSException import ConflictError
527    security = ClassSecurityInfo()
528    meta_type = 'WAeUP Pin Tool'
529    name = "portal_pins"
530    key = 'pin'
531
532    def __init__(self,name=None):
533        if name is None:
534            name = self.name
535        WAeUPTable.__init__(self, name)
536
537    security.declareProtected(ModifyPortalContent,"dumpAll")###(
538    def dumpAll(self,include_unused=None):
539        """dump all data in the table to a csv"""
540        member = self.portal_membership.getAuthenticatedMember()
541        logger = logging.getLogger('WAeUPTables.PinTable.dumpAll')
542        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
543        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
544        res_list = []
545        lines = []
546        if hasattr(self,"export_keys"):
547            fields = self.export_keys
548        else:
549            fields = []
550            for f in self.schema():
551                fields.append(f)
552        headline = ','.join(fields)
553        out = open(export_file,"wb")
554        out.write(headline +'\n')
555        out.close()
556        out = open(export_file,"a")
557        csv_writer = csv.DictWriter(out,fields,)
558        if include_unused is not None and str(member) not in ('admin','joachim'):
559            logger.info('%s tries to dump pintable with unused pins' % (member))
560            return
561        if include_unused is not None:
562            records = self()
563        else:
564            records = self.evalAdvancedQuery(~Eq('student',''))
565        nr2export = len(records)
566        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
567        chunk = 2000
568        total = 0
569        start = DateTime.DateTime().timeTime()
570        start_chunk = DateTime.DateTime().timeTime()
571        for record in records:
572            not_all = False
573            d = self.record2dict(fields,record,None)
574            lines.append(d)
575            total += 1
576            if total and not total % chunk or total == len(records):
577                csv_writer.writerows(lines)
578                anz = len(lines)
579                logger.info("wrote %(anz)d  total written %(total)d" % vars())
580                end_chunk = DateTime.DateTime().timeTime()
581                duration = end_chunk-start_chunk
582                per_record = duration/anz
583                till_now = end_chunk - start
584                average_per_record = till_now/total
585                estimated_end = DateTime.DateTime(start + average_per_record * nr2export)
586                estimated_end = estimated_end.strftime("%H:%M:%S")
587                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
588                start_chunk = DateTime.DateTime().timeTime()
589                lines = []
590        end = DateTime.DateTime().timeTime()
591        logger.info('total time %6.2f m' % ((end-start)/60))
592        import os
593        filename, extension = os.path.splitext(export_file)
594        from subprocess import call
595        msg = "wrote %(total)d records to %(export_file)s" % vars()
596        #try:
597        #    retcode = call('gzip %s' % (export_file),shell=True)
598        #    if retcode == 0:
599        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
600        #except OSError, e:
601        #    retcode = -99
602        #    logger.info("zip failed with %s" % e)
603        logger.info(msg)
604        args = {'portal_status_message': msg}
605        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
606        url = self.REQUEST.get('URL2')
607        return self.REQUEST.RESPONSE.redirect(url)
608    ###)
609
610
611
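    # searchAndSetRecord marks a pin as used by a student and reports the
    # outcome: -1 pin not found, 1 pin newly assigned, 2 pin already assigned
    # to this student (also returned after a write conflict), 0 pin used by a
    # different student.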
612    def searchAndSetRecord(self, uid, student_id,prefix):
613
614        # The following line must be activated after resetting the
615        # the portal_pins table. This is to avoid duplicate entries
616        # and disable duplicate payments.
617
618        #student_id = student_id.upper()
619
620        #records = self.searchResults(student = student_id)
621        #if len(records) > 0 and prefix in ('CLR','APP'):
622        #    for r in records:
623        #        if r.pin != uid and r.prefix_batch.startswith(prefix):
624        #            return -2
625        records = self.searchResults({"%s" % self.key : uid})
626        if len(records) > 1:
627            # Can not happen, but anyway...
628            raise ValueError("More than one record with uid %s" % uid)
629        if len(records) == 0:
630            return -1,None
631        record = records[0]
632        if record.student == "":
633            record_data = {}
634            for field in self.schema() + self.indexes():
635                record_data[field] = getattr(record, field)
636            # Add the updated data:
637            record_data['student'] = student_id
638            try:
639                self.catalog_object(dict2ob(record_data), uid)
640                return 1,record
641            except self.ConflictError:   # imported at class level above
642                return 2,record
643        if record.student.upper() != student_id.upper():
644            return 0,record
645        if record.student.upper() == student_id.upper():
646            return 2,record
647        return -3,record
648InitializeClass(PinTable)
649###)
650
651class PumeResultsTable(WAeUPTable): ###(
652
653    meta_type = 'WAeUP PumeResults Tool'
654    name = "portal_pumeresults"
655    key = "jamb_reg_no"
656    def __init__(self,name=None):
657        if name is None:
658            name = self.name
659        WAeUPTable.__init__(self, name)
660
661
662InitializeClass(PumeResultsTable)
663
664###)
665
666class ApplicantsCatalog(WAeUPTable): ###(
667
668    meta_type = 'WAeUP Applicants Catalog'
669    name = "applicants_catalog"
670    key = "reg_no"
671    security = ClassSecurityInfo()
672    #export_keys = (
673    #               "reg_no",
674    #               "status",
675    #               "lastname",
676    #               "sex",
677    #               "date_of_birth",
678    #               "lga",
679    #               "email",
680    #               "phone",
681    #               "passport",
682    #               "entry_mode",
683    #               "pin",
684    #               "screening_type",
685    #               "registration_date",
686    #               "testdate",
687    #               "application_date",
688    #               "screening_date",
689    #               "faculty",
690    #               "department",
691    #               "course1",
692    #               "course2",
693    #               "course3",
694    #               "eng_score",
695    #               "subj1",
696    #               "subj1score",
697    #               "subj2",
698    #               "subj2score",
699    #               "subj3",
700    #               "subj3score",
701    #               "aggregate",
702    #               "course_admitted",
703    #               )
704
705    def __init__(self,name=None):
706        if name is None:
707            name = self.name
708        WAeUPTable.__init__(self, name)
709
710    security.declareProtected(ModifyPortalContent,"new_importCSV")###(
711    def new_importCSV(self,filename="JAMB_data",
712                  schema_id="application",
713                  layout_id="import_application",
714                  mode='add'):
715        """ import JAMB data """
716        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
717        pm = self.portal_membership
718        member = pm.getAuthenticatedMember()
719        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
720        lock_fn = "%s/import/%s_import_lock" % (i_home,filename)
721        import_fn = "%s/import/%s.csv" % (i_home,filename)
722        if mode not in ('add','edit'):
723            logger.info("invalid mode: %s" % mode)
724            return
724        if os.path.exists(lock_fn):
725            logger.info("import of %(import_fn)s already in progress" % vars())
726            return
727        lock_file = open(lock_fn,"w")
728        lock_file.write("%(current)s \n" % vars())
729        lock_file.close()
730        invalid_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
731        duplicate_fn = "%s/import/%s_duplicates%s.csv" % (i_home,filename,current)
732        stool = getToolByName(self, 'portal_schemas')
733        ltool = getToolByName(self, 'portal_layouts')
734        schema = stool._getOb(schema_id)
735        if schema is None:
736            em = 'No such schema %s' % schema_id
737            logger.error(em)
738            return
739        for postfix in ('_import',''):
740            layout_name = "%(layout_id)s%(postfix)s" % vars()
741            if hasattr(ltool,layout_name):
742                break
743        layout = ltool._getOb(layout_name)
744        if layout is None:
745            em = 'No such layout %s' % layout_id
746            logger.error(em)
747            return
748        try:
749            csv_file = csv.DictReader(open(import_fn,"rb"))
750        except:
751            em = 'Error reading %s.csv' % filename
752            logger.error(em)
753            return
754        d = self._import_new(csv_file,schema,layout,mode,logger)
755        imported = []
756        edited = []
757        duplicates = []
758        not_found = []
759        if len(d['valid_records']) > 0:
760            for record in d['valid_records']:
761                #import pdb;pdb.set_trace()
762                if mode == "add":
763                    try:
764                        self.addRecord(**dict(record.items()))
765                        imported.append(dict(record.items()))
766                        logger.info("added %s" % record.items())
767                    except ValueError:
768                        duplicates.append(dict(record.items()))
769                        logger.info("duplicate %s" % record.items())
770                elif mode == "edit":
771                    try:
772                        self.modifyRecord(**dict(record.items()))
773                        edited.append(dict(record.items()))
774                        logger.info("edited %s" % record.items())
775                    except KeyError:
776                        not_found.append(dict(record.items()))
777                        logger.info("not found %s" % record.items())
778        invalid = d['invalid_records']
779        for itype in ("imported","edited","not_found","duplicates","invalid"):
780            outlist = locals()[itype]
781            if len(outlist):
782                # prepend a header row (column name -> column name) for DictWriter
783                header = {}
784                for k in outlist[0].keys():
785                    header[k] = k
786                outlist.insert(0,header)
787                outfile = open("file_name_%s" % itype,'w')
788                csv.DictWriter(outfile,header.keys()).writerows(outlist)
789                logger.info("wrote %d %s records" % (len(outlist) - 1,itype))
789###)
790
791    security.declareProtected(ModifyPortalContent,"importCSV")###(
792    def importCSV(self,filename="JAMB_data",
793                  schema_id="application",
794                  layout_id="application_pce",
795                  mode='add'):
796        """ import JAMB data """
797        stool = getToolByName(self, 'portal_schemas')
798        ltool = getToolByName(self, 'portal_layouts')
799        schema = stool._getOb(schema_id)
800        if schema is None:
801            em = 'No such schema %s' % schema_id
802            logger.error(em)
803            return
804        layout = ltool._getOb(layout_id)
805        if layout is None:
806            em = 'No such layout %s' % layout_id
807            logger.error(em)
808            return
809        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
810        d = self._import_old(filename,schema,layout,mode,logger)
811        if len(d['valid_records']) > 0:
812            for record in d['valid_records']:
813                #import pdb;pdb.set_trace()
814                if mode == "add":
815                    self.addRecord(**dict(record.items()))
816                    logger.info("added %s" % record.items())
817                elif mode == "edit":
818                    self.modifyRecord(**dict(record.items()))
819                    logger.info("edited %s" % record.items())
820                else:
821                    logger.info("invalid mode: %s" % mode)
822        logger.info("%(mode)sed %(imported)d records, invalid written to %(not_imported_fn)s" % d)
823    ###)
824
825InitializeClass(ApplicantsCatalog)
826
827###)
828
829class StudentsCatalog(WAeUPTable): ###(
830    security = ClassSecurityInfo()
831
832    meta_type = 'WAeUP Students Catalog'
833    name = "students_catalog"
834    key = "id"
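    # affected_types maps the portal_type of each student sub-object to the
    # attribute holding it and to the catalog fields derived from it;
    # reindexIndex and notify_event_listener use it to decide what to update.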
835    affected_types = {   ###(
836                      'StudentApplication':
837                      {'id': 'application',
838                       'fields':
839                       ('jamb_reg_no',
840                        'entry_mode',
841                        #'entry_level',
842                        'entry_session',
843                       )
844                      },
845                      'StudentClearance':
846                      {'id': 'clearance',
847                       'fields':
848                       ('matric_no',
849                        'lga',
850                       )
851                      },
852                      'StudentPersonal':
853                      {'id': 'personal',
854                       'fields':
855                       ('name',
856                        'sex',
857                        'perm_address',
858                        'email',
859                        'phone',
860                       )
861                      },
862                      'StudentStudyCourse':
863                      {'id': 'study_course',
864                       'fields':
865                       ('course', # study_course
866                        'faculty', # from certificate
867                        'department', # from certificate
868                        'end_level', # from certificate
869                        'level', # current_level
870                        'mode',  # from certificate
871                        'session', # current_session
872                        'verdict', # current_verdict
873                       )
874                      },
875                     }
876    ###)
877
878    def __init__(self,name=None):
879        if name is None:
880            name = self.name
881        WAeUPTable.__init__(self, name)
882        return
883
884    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None):
885        """ clears the whole enchilada """
886        self._catalog.clear()
887
888        if REQUEST and RESPONSE:
889            RESPONSE.redirect(
890              URL1 +
891              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Cleared')
892
893    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
894        """ clear the catalog, then re-index everything """
895
896        elapse = time.time()
897        c_elapse = time.clock()
898
899        pgthreshold = self._getProgressThreshold()
900        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
901        self.refreshCatalog(clear=1, pghandler=handler)
902
903        elapse = time.time() - elapse
904        c_elapse = time.clock() - c_elapse
905
906        RESPONSE.redirect(
907            URL1 +
908            '/manage_catalogAdvanced?manage_tabs_message=' +
909            urllib.quote('Catalog Updated \n'
910                         'Total time: %s\n'
911                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
912    ###)
913
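    # fill_certificates_dict caches faculty, department, end_level and
    # study_mode per certificate id (taken from the certificate's path and
    # content) so the get_from_doc_* lookups can avoid repeated catalog queries.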
914    def fill_certificates_dict(self): ###(
915        "return certificate data in  dict"
916        certificates_brains = self.portal_catalog(portal_type ='Certificate')
917        d = {}
918        for cb in certificates_brains:
919            certificate_doc = cb.getObject().getContent()
920            cb_path = cb.getPath().split('/')
921            ld = {}
922            ld['faculty'] = cb_path[-4]
923            ld['department'] = cb_path[-3]
924            ld['end_level'] = getattr(certificate_doc,'end_level','999')
925            ld['study_mode'] = getattr(certificate_doc,'study_mode','')
926            d[cb.getId] = ld
927        return d
928    ###)
929
930    def get_from_doc_department(self,doc,cached_data={}): ###(
931        "return the students department"
932        if doc is None:
933            return None
934        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
935            return self._v_certificates[doc.study_course]['department']
936        certificate_res = self.portal_catalog(id = doc.study_course)
937        if len(certificate_res) != 1:
938            return None
939        return certificate_res[0].getPath().split('/')[-3]
940
941    def get_from_doc_faculty(self,doc,cached_data={}):
942        "return the students faculty"
943        if doc is None:
944            return None
945        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
946            return self._v_certificates[doc.study_course]['faculty']
947        certificate_res = self.portal_catalog(id = doc.study_course)
948        if len(certificate_res) != 1:
949            return None
950        return certificate_res[0].getPath().split('/')[-4]
951
952    def get_from_doc_end_level(self,doc,cached_data={}):
953        "return the students end_level"
954        if doc is None:
955            return None
956        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
957            return self._v_certificates[doc.study_course]['end_level']
958        certificate_res = self.portal_catalog(id = doc.study_course)
959        if len(certificate_res) != 1:
960            return None
961        return getattr(certificate_res[0].getObject().getContent(),'end_level','unknown')
962
963    def get_from_doc_level(self,doc,cached_data={}):
964        "return the students level"
965        if doc is None:
966            return None
967        return getattr(doc,'current_level',None)
968
969    #def get_from_doc_mode(self,doc,cached_data={}):
970    #    "return the students mode"
971    #    if doc is None:
972    #        return None
973    #    cm = getattr(doc,'current_mode',None)
974    #    return cm
975   
976    def get_from_doc_mode(self,doc,cached_data={}):
977        "return the students mode"
978        if doc is None:
979            return None
980        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
981            return self._v_certificates[doc.study_course]['study_mode']
982        certificate_res = self.portal_catalog(id = doc.study_course)
983        if len(certificate_res) != 1:
984            return None
985        return getattr(certificate_res[0].getObject().getContent(),'study_mode','unknown')   
986
987
988    def get_from_doc_session(self,doc,cached_data={}):
989        "return the students current_session"
990        if doc is None:
991            return None
992        return getattr(doc,'current_session',None)
993
994    def get_from_doc_entry_session(self,doc,cached_data={}):
995        "return the students entry_session"
996        if doc is None:
997            return None
998        es = getattr(doc,'entry_session',None)
999        if es is not None and len(es) < 3:
1000            return es
1001        elif es is not None and len(es) == 9:
1002            return es[2:4]
1003        try:
1004            digit = int(doc.jamb_reg_no[0])
1005        except:
1006            return "-1"
1007        if digit < 9:
1008            return "0%c" % doc.jamb_reg_no[0]
1009        return "9%c" % doc.jamb_reg_no[0]
1010
1011    def get_from_doc_course(self,doc,cached_data={}):
1012        "return the students study_course"
1013        if doc is None:
1014            return None
1015        return getattr(doc,'study_course',None)
1016
1017    def get_from_doc_name(self,doc,cached_data={}):
1018        "return the students name from the personal"
1019        if doc is None:
1020            return None
1021        return "%s %s %s" % (doc.firstname,doc.middlename,doc.lastname)
1022
1023    def get_from_doc_verdict(self,doc,cached_data={}):
1024        "return the students study_course"
1025        if doc is None:
1026            return None
1027        return getattr(doc,'current_verdict',None)
1028    ###)
1029
1030    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1031        if not hasattr(self,'_v_certificates'):
1032            self._v_certificates = self.fill_certificates_dict()
1033        if isinstance(name, str):
1034            name = (name,)
1035        reindextypes = {}
1036        reindex_special = []
1037        for n in name:
1038            if n in ("review_state",):
1039                reindex_special.append(n)
1040            else:
1041                for pt in self.affected_types.keys():
1042                    if n in self.affected_types[pt]['fields']:
1043                        if reindextypes.has_key(pt):
1044                            reindextypes[pt].append(n)
1045                        else:
1046                            reindextypes[pt]= [n]
1047                        break
1048        #cached_data = {}
1049        #if set(name).intersection(set(('faculty','department','end_level','mode'))):
1050        #    cached_data = self.fill_certificates_dict()
1051        students = self.portal_catalog(portal_type="Student")
1052        if hasattr(self,'portal_catalog_real'):
1053            aq_portal = self.portal_catalog_real.evalAdvancedQuery
1054        else:
1055            aq_portal = self.portal_catalog.evalAdvancedQuery
1056        num_objects = len(students)
1057        if pghandler:
1058            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1059        noattr = set(('StudentClearance','StudentPersonal')) & set(reindextypes.keys())
1060        #import pdb;pdb.set_trace()
1061        for i in xrange(num_objects):
1062            if pghandler: pghandler.report(i)
1063            student_brain = students[i]
1064            student_object = student_brain.getObject()
1065            data = {}
1066            modified = False
1067            sid = data['id'] = student_brain.getId
1068            if reindex_special and 'review_state' in reindex_special:
1069                modified = True
1070                data['review_state'] = student_object.portal_workflow.getInfoFor(student_object,'review_state',None)
1071            sub_objects = False
1072            for pt in reindextypes.keys():
1073                modified = True
1074                try:
1075                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1076                    sub_objects = True
1077                except:
1078                    continue
1079                for field in set(name).intersection(self.affected_types[pt]['fields']):
1080                    if hasattr(self,'get_from_doc_%s' % field):
1081                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc)
1082                    else:
1083                        data[field] = getattr(doc,field)
1084            if not sub_objects and noattr:
1085                import_res = self.returning_import(id = sid)
1086                if not import_res:
1087                    continue
1088                import_record = import_res[0]
1089                data['matric_no'] = import_record.matric_no
1090                data['sex'] = import_record.Sex == 'F'
1091                data['name'] = "%s %s %s" % (import_record.Firstname,
1092                                             import_record.Middlename,
1093                                             import_record.Lastname)
1094                data['jamb_reg_no'] = import_record.Entryregno
1095            if modified:
1096                self.modifyRecord(**data)
1097        if pghandler: pghandler.finish()
1098    ###)
1099
1100    def refreshCatalog(self, clear=0, pghandler=None): ###(
1101        """ re-index everything we can find """
1102        students_folder = self.portal_url.getPortalObject().campus.students
1103        if clear:
1104            self._catalog.clear()
1105        students = self.portal_catalog(portal_type="Student")
1106        num_objects = len(students)
1107        #cached_data = self.fill_certificates_dict()
1108        if not hasattr(self,'_v_certificates'):
1109            self._v_certificates = self.fill_certificates_dict()
1110        if pghandler:
1111            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1112        for i in xrange(num_objects):
1113            if pghandler: pghandler.report(i)
1114            student_brain = students[i]
1115            spath = student_brain.getPath()
1116            student_object = student_brain.getObject()
1117            data = {}
1118            sid = data['id'] = student_brain.getId
1119            #data['review_state'] = student_brain.review_state
1120            data['review_state'] = student_object.portal_workflow.getInfoFor(student_object,'review_state',None)
1121            sub_objects = False
1122            for pt in self.affected_types.keys():
1123                modified = True
1124                try:
1125                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1126                    sub_objects = True
1127                except:
1128                    #from pdb import set_trace;set_trace()
1129                    continue
1130                for field in self.affected_types[pt]['fields']:
1131                    if hasattr(self,'get_from_doc_%s' % field):
1132                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc)
1134                    else:
1135                        data[field] = getattr(doc,field,None)
1136            if not sub_objects:
1137                import_res = self.returning_import(id = sid)
1138                if not import_res:
1139                    continue
1140                import_record = import_res[0]
1141                data['matric_no'] = import_record.matric_no
1142                data['sex'] = import_record.Sex == 'F'
1143                data['name'] = "%s %s %s" % (import_record.Firstname,
1144                                             import_record.Middlename,
1145                                             import_record.Lastname)
1146                data['jamb_reg_no'] = import_record.Entryregno
1147            self.addRecord(**data)
1148        if pghandler: pghandler.finish()
1149    ###)
1150
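    # notify_event_listener keeps students_catalog in sync with CPS events:
    # workflow events update review_state, sys_add/sys_del create or delete the
    # record, and sys_modify re-derives the fields listed in affected_types.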
1151    security.declarePrivate('notify_event_listener') ###(
1152    def notify_event_listener(self,event_type,object,infos):
1153        "listen for events"
1154        if not infos.has_key('rpath'):
1155            return
1156        pt = getattr(object,'portal_type',None)
1157        mt = getattr(object,'meta_type',None)
1158        students_catalog = self
1159        data = {}
1160        if pt == 'Student' and\
1161           mt == 'CPS Proxy Folder' and\
1162           event_type.startswith('workflow'):
1163            data['id'] = object.getId()
1164            data['review_state'] = self.portal_workflow.getInfoFor(object,'review_state',None)
1165            students_catalog.modifyRecord(**data)
1166            return
1167        rpl = infos['rpath'].split('/')
1168        if pt == 'Student' and mt == 'CPS Proxy Folder':
1169            student_id = object.id
1170            if event_type == "sys_add_object":
1171                try:
1172                    self.addRecord(id = student_id)
1173                except ValueError:
1174                    pass
1175                return
1176            elif event_type == 'sys_del_object':
1177                self.deleteRecord(student_id)
1178        if pt not in self.affected_types.keys():
1179            return
1180        if event_type not in ('sys_modify_object',):
1181            return
1182        if mt == 'CPS Proxy Folder':
1183            return
1184        if not hasattr(self,'_v_certificates'):
1185            self._v_certificates = self.fill_certificates_dict()
1186        for field in self.affected_types[pt]['fields']:
1187            if hasattr(self,'get_from_doc_%s' % field):
1188                data[field] = getattr(self,'get_from_doc_%s' % field)(object)
1189            else:
1190                data[field] = getattr(object,field)
1191        data['id'] = rpl[2]
1192        self.modifyRecord(**data)
1193    ###)
1194
1195
1196InitializeClass(StudentsCatalog)
1197
1198###)
1199
1200class CertificatesCatalog(WAeUPTable): ###(
1201    security = ClassSecurityInfo()
1202
1203    meta_type = 'WAeUP Certificates Catalog'
1204    name =  "certificates_catalog"
1205    key = "code"
1206    def __init__(self,name=None):
1207        if name is None:
1208            name = self.name
1209        WAeUPTable.__init__(self, name)
1210
1211    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
1212        """ clear the catalog, then re-index everything """
1213
1214        elapse = time.time()
1215        c_elapse = time.clock()
1216
1217        pgthreshold = self._getProgressThreshold()
1218        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
1219        self.refreshCatalog(clear=1, pghandler=handler)
1220
1221        elapse = time.time() - elapse
1222        c_elapse = time.clock() - c_elapse
1223
1224        RESPONSE.redirect(
1225            URL1 +
1226            '/manage_catalogAdvanced?manage_tabs_message=' +
1227            urllib.quote('Catalog Updated \n'
1228                         'Total time: %s\n'
1229                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
1230    ###)
1231
1232    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1233        if isinstance(name, str):
1234            name = (name,)
1235        certificates = self.portal_catalog(portal_type="Certificate")
1236        num_objects = len(certificates)
1237        if pghandler:
1238            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1239        for i in xrange(num_objects):
1240            if pghandler: pghandler.report(i)
1241            certificate_brain = certificates[i]
1242            certificate_object = certificate_brain.getObject()
1243            pl = certificate_brain.getPath().split('/')
1244            data = {}
1245            cid = data[self.key] = certificate_brain.getId
1246            data['faculty'] = pl[-4]
1247            data['department'] = pl[-3]
1248            doc = certificate_object.getContent()
1249            for field in name:
1250                if field not in (self.key,'faculty','department'):
1251                    data[field] = getattr(doc,field)
1252            self.modifyRecord(**data)
1253        if pghandler: pghandler.finish()
1254    ###)
1255
1256    def refreshCatalog(self, clear=0, pghandler=None): ###(
1257        """ re-index everything we can find """
1258        if clear:
1259            self._catalog.clear()
1260        certificates = self.portal_catalog(portal_type="Certificate")
1261        num_objects = len(certificates)
1262        if pghandler:
1263            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1264        #from pdb import set_trace;set_trace()
1265        for i in xrange(num_objects):
1266            if pghandler: pghandler.report(i)
1267            certificate_brain = certificates[i]
1268            certificate_doc = certificate_brain.getObject().getContent()
1269            pl = certificate_brain.getPath().split('/')
1270            data = {}
1271            for field in self.schema():
1272                data[field] = getattr(certificate_doc,field,None)
1273            data[self.key] = certificate_brain.getId
1274            ai = pl.index('academics')
1275            data['faculty'] = pl[ai +1]
1276            data['department'] = pl[ai +2]
1277            if clear:
1278                self.addRecord(**data)
1279            else:
1280                self.modifyRecord(**data)
1281        if pghandler: pghandler.finish()
1282    ###)
1283
1284    security.declarePrivate('notify_event_listener') ###(
1285    def notify_event_listener(self,event_type,object,infos):
1286        "listen for events"
1287        if not infos.has_key('rpath'):
1288            return
1289        pt = getattr(object,'portal_type',None)
1290        mt = getattr(object,'meta_type',None)
1291        if pt != 'Certificate':
1292            return
1293        data = {}
1294        rpl = infos['rpath'].split('/')
1295        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
1296            return
1297        certificate_id = object.getId()
1298        data[self.key] = certificate_id
1299        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
1300            try:
1301                self.addRecord(**data)
1302            except ValueError:
1303                return
1304            certificate_id = object.getId()
1305            doc = object.getContent()
1306            if doc is None:
1307                return
1308            for field in self.schema():
1309                data[field] = getattr(doc,field,None)
1310            data[self.key] = certificate_id
1311            ai = rpl.index('academics')
1312            data['faculty'] = rpl[ai +1]
1313            data['department'] = rpl[ai +2]
1314            self.modifyRecord(**data)
1315            return
1316        if event_type == "sys_del_object":
1317            self.deleteRecord(certificate_id)
1318            return
1319        if event_type == "sys_modify_object" and mt == 'Certificate':
1320            #from pdb import set_trace;set_trace()
1321            for field in self.schema():
1322                data[field] = getattr(object,field,None)
1323            certificate_id = object.aq_parent.getId()
1324            data[self.key] = certificate_id
1325            ai = rpl.index('academics')
1326            data['faculty'] = rpl[ai +1]
1327            data['department'] = rpl[ai +2]
1328            self.modifyRecord(**data)
1329    ###)
1330
1331
1332InitializeClass(CertificatesCatalog)
1333###)
1334
1335class CoursesCatalog(WAeUPTable): ###(
1336    security = ClassSecurityInfo()
1337
1338    meta_type = 'WAeUP Courses Catalog'
1339    name =  "courses_catalog"
1340    key = "code"
1341    def __init__(self,name=None):
1342        if name ==  None:
1343            name =  self.name
1344        WAeUPTable.__init__(self, name)
1345
1346    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
1347        """ clear the catalog, then re-index everything """
1348
1349        elapse = time.time()
1350        c_elapse = time.clock()
1351
1352        pgthreshold = self._getProgressThreshold()
1353        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
1354        self.refreshCatalog(clear=1, pghandler=handler)
1355
1356        elapse = time.time() - elapse
1357        c_elapse = time.clock() - c_elapse
1358
1359        RESPONSE.redirect(
1360            URL1 +
1361            '/manage_catalogAdvanced?manage_tabs_message=' +
1362            urllib.quote('Catalog Updated \n'
1363                         'Total time: %s\n'
1364                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
1365    ###)
1366
1367    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1368        if isinstance(name, str):
1369            name = (name,)
1370        courses = self.portal_catalog(portal_type="Course")
1371        num_objects = len(courses)
1372        if pghandler:
1373            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1374        for i in xrange(num_objects):
1375            if pghandler: pghandler.report(i)
1376            course_brain = courses[i]
1377            course_object = course_brain.getObject()
1378            pl = course_brain.getPath().split('/')
1379            data = {}
1380            cid = data[self.key] = course_brain.getId
1381            data['faculty'] = pl[-4]
1382            data['department'] = pl[-3]
1383            doc = course_object.getContent()
1384            for field in name:
1385                if field not in (self.key,'faculty','department'):
1386                    data[field] = getattr(doc,field)
1387            self.modifyRecord(**data)
1388        if pghandler: pghandler.finish()
1389    ###)
1390
1391    def refreshCatalog(self, clear=0, pghandler=None): ###(
1392        """ re-index everything we can find """
1393        if clear:
1394            self._catalog.clear()
1395        courses = self.portal_catalog(portal_type="Course")
1396        num_objects = len(courses)
1397        if pghandler:
1398            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1399        #from pdb import set_trace;set_trace()
1400        for i in xrange(num_objects):
1401            if pghandler: pghandler.report(i)
1402            course_brain = courses[i]
1403            course_doc = course_brain.getObject().getContent()
1404            pl = course_brain.getPath().split('/')
1405            data = {}
1406            for field in self.schema():
1407                data[field] = getattr(course_doc,field,None)
1408            data[self.key] = course_brain.getId
1409            ai = pl.index('academics')
1410            data['faculty'] = pl[ai +1]
1411            data['department'] = pl[ai +2]
1412            if clear:
1413                self.addRecord(**data)
1414            else:
1415                self.modifyRecord(**data)
1416        if pghandler: pghandler.finish()
1417    ###)
1418
1419    security.declarePrivate('notify_event_listener') ###(
1420    def notify_event_listener(self,event_type,object,infos):
1421        "listen for events"
1422        if not infos.has_key('rpath'):
1423            return
1424        pt = getattr(object,'portal_type',None)
1425        mt = getattr(object,'meta_type',None)
1426        if pt != 'Course':
1427            return
1428        data = {}
1429        rpl = infos['rpath'].split('/')
1430        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
1431            return
1432        course_id = object.getId()
1433        data[self.key] = course_id
1434        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
1435            try:
1436                self.addRecord(**data)
1437            except ValueError:
1438                return
1439            course_id = object.getId()
1440            doc = object.getContent()
1441            if doc is None:
1442                return
1443            for field in self.schema():
1444                data[field] = getattr(doc,field,None)
1445            data[self.key] = course_id
1446            ai = rpl.index('academics')
1447            data['faculty'] = rpl[ai +1]
1448            data['department'] = rpl[ai +2]
1449            self.modifyRecord(**data)
1450            return
1451        if event_type == "sys_del_object":
1452            self.deleteRecord(course_id)
1453            return
1454        if event_type == "sys_modify_object" and mt == 'Course':
1455            #from pdb import set_trace;set_trace()
1456            for field in self.schema():
1457                data[field] = getattr(object,field,None)
1458            course_id = object.aq_parent.getId()
1459            data[self.key] = course_id
1460            ai = rpl.index('academics')
1461            data['faculty'] = rpl[ai +1]
1462            data['department'] = rpl[ai +2]
1463            self.modifyRecord(**data)
1464    ###)
1465
1466
1467InitializeClass(CoursesCatalog)
1468###)
1469
1470class CourseResults(WAeUPTable): ###(
1471    security = ClassSecurityInfo()
1472
1473    meta_type = 'WAeUP Results Catalog'
1474    name = "course_results"
1475    key = "key" # composite key: student_id|level_id|course_id
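    # e.g. a record for student A123456, level 100, course MTH101 (hypothetical ids) is
    # catalogued under the uid "A123456|100|MTH101"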
1476    def __init__(self,name=None):
1477        if name ==  None:
1478            name = self.name
1479        WAeUPTable.__init__(self, name)
1480        self._queue = []
1481
1482    def addMultipleRecords(self, records): ###(
1483        """add many records"""
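        # Builds one uid per record in the 'student_id|level_id|course_id' form shown above;
        # records whose uid already exists with matching student/level/course ids are skipped
        # and their uids returned, everything else is (re)catalogued under its uid.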
1484        existing_uids = []
1485        for data in records:
1486            uid = "%(student_id)s|%(level_id)s|%(course_id)s" % data
1487            data[self.key] = uid
1488            query = Eq(self.key, uid)
1489            res = self.course_results.evalAdvancedQuery(query)
1490            if len(res) > 0:
1491                rec = res[0]
1492                equal = True
1493                for attr in ('student_id','level_id','course_id'):
1494                    if getattr(rec,attr,'') != data[attr]:
1495                        equal = False
1496                        break
1497                if equal:
1498                    existing_uids += uid,
1499                    continue
1500            self.catalog_object(dict2ob(data), uid=uid)
1501        return existing_uids
1502    ###)
1503
1504    def deleteResultsHere(self,level_id,student_id): ###(
1505        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1506        course_results = self.course_results.evalAdvancedQuery(query)
1507        #import pdb;pdb.set_trace()
1508        for result in course_results:
1509            self.deleteRecord(result.key)
1510    ###)
1511
1512    def moveResultsHere(self,level,student_id): ###(
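        """move the course result objects stored inside a student's level folder into this
        catalog (building the composite key and the carry-over flag) and delete the originals"""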
1513        #import pdb;pdb.set_trace()
1514        level_id = level.getId()
1515        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1516        course_results = self.course_results.evalAdvancedQuery(query)
1517        existing_courses = [cr.code for cr in course_results]
1518        to_delete = []
1519        for code,obj in level.objectItems():
1520            to_delete.append(code)
1521            carry_over = False
1522            if code.endswith('_co'):
1523                carry_over = True
1524                code  = code[:-3]
1525            if code in existing_courses:
1526                continue
1527            course_result_doc = obj.getContent()
1528            data = {}
1529            course_id = code
1530            for field in self.schema():
1531                data[field] = getattr(course_result_doc,field,'')
1532            data['key'] = key = "%(student_id)s|%(level_id)s|%(course_id)s" % vars()
1533            data['student_id'] = student_id
1534            data['level_id'] = level_id
1535            session_id = self.getLevelSession(level.getContent(),student_id,level_id)
1536            data['session_id'] = session_id
1537            #data['queue_status'] = OBJECT_CREATED
1538            data['code'] = course_id
1539            data['carry_over'] = carry_over
1540            self.catalog_object(dict2ob(data), uid=key)
1541        level.manage_delObjects(to_delete)
1542    ###)
1543
1544    def getCourses(self,student_id,level_id): ###(
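        """return (total_credits, gpa, carry_overs, normal1, normal2, normal3) for one level
        of a student, where normal1/2/3 hold the first-, second- and other-semester course
        dicts and carry_overs the carry-over courses"""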
1545        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1546        course_results = self.course_results.evalAdvancedQuery(query)
1547        carry_overs = []
1548        normal1 = []
1549        normal2 = []
1550        normal3 = []
1551        total_credits = 0
1552        gpa = 0
1553        for brain in course_results:
1554            d = {}
1555
1556            for field in self.schema():
1557                d[field] = getattr(brain,field,None)
1558                if repr(d[field]) == 'Missing.Value':
1559                    d[field] = ''
1560            d['weight'] = ''
1561            d['grade'] = ''
1562            d['score'] = ''
1563
1564            if str(brain.credits).isdigit():
1565                credits = int(brain.credits)
1566                total_credits += credits
1567                score = getattr(brain,'score',0)
1568                if score and str(score).isdigit() and int(score) > 0:
1569                    score = int(score)
1570                    grade,weight = self.getGradesFromScore(score,'')
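                    # 'gpa' accumulates weight * credits, i.e. a weighted grade-point total;
                    # presumably the caller divides it by total_credits to obtain the actual GPA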
1571                    gpa += weight * credits
1572                    d['weight'] = weight
1573                    d['grade'] = grade
1574                    d['score'] = score
1575
1576            #if str(brain.ca1).isdigit() and str(brain.ca2).isdigit() and str(brain.exam).isdigit():
1577            #    d['score_calc'] = int(brain.ca1) + int(brain.ca2) + int(brain.exam)
1578            #else:
1579            #    d['score_calc'] = ''
1580            try:
1581                d['score_calc'] = float(brain.ca1) + float(brain.ca2) + float(brain.exam)
1582            except:
1583                d['score_calc'] = ''
1584
1585            if d['score_calc']:
1586                grade,weight = self.getGradesFromScore(d['score_calc'],level_id) # returns (grade, weight)
1587                d['grade'] = grade
1588
1589            d['coe'] = ''
1590            if brain.core_or_elective:
1591                d['coe'] = 'Core'
1592            elif brain.core_or_elective == False:
1593                d['coe'] = 'Elective'
1594            id = code = d['id'] = brain.code
1595            d['code'] = code
1596            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1597            if res:
1598                course = res[0]
1599                d['title'] = course.title
1600                # The courses_catalog contains both strings and integers in its semester field,
1601                # although the course schema declares it as 'CPS Int Field'; reindexing the catalog might fix this.
1602                d['semester'] = str(course.semester)
1603            else:
1604                d['title'] = "Course has been removed from course list"
1605                d['semester'] = ''
1606            if brain.carry_over:
1607                d['coe'] = 'CO'
1608                carry_overs.append(d)
1609            else:
1610                if d['semester'] == '1':
1611                    normal1.append(d)
1612
1613                elif d['semester'] == '2':
1614                    normal2.append(d)
1615                else:
1616                    normal3.append(d)
1617        #normal.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
1618        #                                "%(semester)s%(code)s" % y))
1619        carry_overs.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
1620                                             "%(semester)s%(code)s" % y))
1621        return total_credits,gpa,carry_overs,normal1,normal2,normal3
1622    ###)
1623
1624
1625    def getAllCourses(self,student_id): ###(
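        """return all course result entries of a student (all levels) as a list of dicts,
        enriched with grade/weight, core/elective flag and course title/semester"""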
1626        query = Eq('student_id',student_id)
1627        course_results = self.course_results.evalAdvancedQuery(query)
1628        courses = []
1629        for brain in course_results:
1630            d = {}
1631
1632            for field in self.schema():
1633                d[field] = getattr(brain,field,'')
1634
1635            d['weight'] = ''
1636            d['grade'] = ''
1637            d['score'] = ''
1638
1639            if str(brain.credits).isdigit():
1640                credits = int(brain.credits)
1641                score = getattr(brain,'score',0)
1642                if score and str(score).isdigit() and int(score) > 0:
1643                    score = int(score)
1644                    grade,weight = self.getGradesFromScore(score)
1645                    d['weight'] = weight
1646                    d['grade'] = grade
1647                    d['score'] = score
1648            d['coe'] = ''
1649            if brain.core_or_elective:
1650                d['coe'] = 'Core'
1651            elif brain.core_or_elective == False:
1652                d['coe'] = 'Elective'
1653            id = code = d['id'] = brain.code
1654            d['code'] = code
1655            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1656            if res:
1657                course = res[0]
1658                d['title'] = course.title
1659                # The courses_catalog contains both strings and integers in its semester field,
1660                # although the course schema declares it as 'CPS Int Field'; reindexing the catalog might fix this.
1661                d['semester'] = str(course.semester)
1662            else:
1663                d['title'] = "Course has been removed from course list"
1664                d['semester'] = ''
1665            if brain.carry_over:
1666                d['coe'] = 'CO'
1667            courses.append(d)
1668        return courses
1669    ###)
1670   
1671    def getYearGroupAverage(self,session_id,level_id): ###(
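        """return per-semester year group averages for one session and level:
        (avg1, avg2, avg3, count1, count2, count3, scores1, scores2, scores3)"""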
1672        query = Eq('session_id',session_id) & Eq('level_id',level_id)
1673        course_results = self.course_results.evalAdvancedQuery(query)
1674        yga1 = 0
1675        yg1 = []
1676        counter1 = 0
1677        yga2 = 0
1678        yg2 = []
1679        counter2 = 0
1680        yga3 = 0
1681        yg3 = []
1682        counter3 = 0       
1683        #import pdb;pdb.set_trace()
1684        for brain in course_results:
1685            try:
1686                om = float(brain.ca1) + float(brain.ca2) + float(brain.exam)
1687                if not om > 0:
1688                    continue
1689                code = brain.code               
1690                res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1691                if res:
1692                    course = res[0]
1693                    # The courses_catalog contains both strings and integers in its semester field,
1694                    # although the course schema declares it as 'CPS Int Field'; reindexing the catalog might fix this.
1695                    semester = str(course.semester)
1696                else:
1697                    semester = ''
1698                if semester == '1':
1699                    counter1 += 1
1700                    yga1 += om
1701                    yg1.append(om)
1702                elif semester == '2':
1703                    counter2 += 1
1704                    yga2 += om     
1705                    yg2.append(om)   
1706                elif semester == '3':
1707                    counter3 += 1
1708                    yga3 += om
1709                    yg3.append(om)
1710            except:
1711                continue               
1712        if counter1:
1713            yga1 /= counter1
1714            yga1 = '%.2f' % yga1   
1715        if counter2:
1716            yga2 /= counter2
1717            yga2 = '%.2f' % yga2   
1718        if counter3:
1719            yga3 /= counter3
1720            yga3 = '%.2f' % yga3                                   
1721        return yga1, yga2, yga3, counter1, counter2, counter3, yg1, yg2, yg3
1722    ###)
1723   
1724   
1725    #security.declarePublic("calculateCoursePosition")
1726    def calculateCoursePosition(self,session_id,level_id,code,score,semester=None):
1727        #"""calculate Course Position"""
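        # Illustrative call (hypothetical values): calculateCoursePosition('08','100','MTH101',65,'1')
        # might return {'pos': '12 of 87', 'ygc': [...]}, i.e. the rank of the given score among
        # all non-zero overall marks (ca1 + ca2 + exam) recorded for that course, session and level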
1728        query = Eq('session_id',session_id) & Eq('level_id',level_id) & Eq('code',code)
1729        course_results = self.course_results.evalAdvancedQuery(query)
1730        ygc = []
1731        #import pdb;pdb.set_trace() 
1732        for brain in course_results:
1733            try:
1734                if not float(brain.ca1) + float(brain.ca2) + float(brain.exam) > 0:
1735                    continue
1736                #code = brain.code   
1737                if semester:
1738                    res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1739                    if res:
1740                        course = res[0]
1741                        # The courses_catalog contains both strings and integers in its semester field,
1742                        # although the course schema declares it as 'CPS Int Field'; reindexing the catalog might fix this.
1743                        semester_from_course = str(course.semester)
1744                    else:
1745                        continue
1746                    if semester != semester_from_course:
1747                        continue
1748                ygc.append(float(brain.ca1) + float(brain.ca2) + float(brain.exam))
1749            except:
1750                continue     
1751        ygc.sort(reverse=True)
1752        if not len(ygc):
1753            return 'no result'
1754        #import pdb;pdb.set_trace()       
1755        for pos in range(len(ygc)):
1756            if ygc[pos] <= float(score):
1757                break
1758        output = {}   
1759        output['pos'] =  '%d of %d' % (pos+1,len(ygc))
1760        output['ygc'] = ygc
1761        return output
1762       
1763    security.declareProtected(ModifyPortalContent,"calculateAllCoursePositions")
1764    def calculateAllCoursePositions(self,session_id=None):
1765        """calculate All Course Positions"""
1766        logger = logging.getLogger('WAeUPTables.CourseResults.calculateAllCoursePositions')
1767        member = self.portal_membership.getAuthenticatedMember()
1768        logger.info('%s starts recalculation of positions in session %s' % (member,session_id))
1769        if session_id:
1770            query = Eq('session_id',session_id)
1771        else:
1772            return 'no session_id provided'
1773        course_results = self.course_results.evalAdvancedQuery(query)
1774        for brain in course_results:
1775            try:
1776                if not float(brain.ca1) + float(brain.ca2) + float(brain.exam) > 0:
1777                    continue
1778                res = self.courses_catalog.evalAdvancedQuery(Eq('code',brain.code))
1779                if res:
1780                    course = res[0]
1781                    semester_from_course = str(course.semester)
1782                else:
1783                    continue                   
1784                score = float(brain.ca1) + float(brain.ca2) + float(brain.exam)
1785                pic = self.calculateCoursePosition(session_id,brain.level_id,brain.code,score,semester_from_course)['pos']
1786                data = {}
1787                data[self.key] = brain.key
1788                data['pic'] = pic
1789                self.modifyRecord(**data)
1790            except:
1791                continue       
1792        logger.info('recalculation finished')             
1793        return 'ready'   
1794   
1795    def exportRemoveAllCourses(self,student_id,export=False,remove=False): ###(
1796        """export a student's course results to CSV and/or delete them from the catalog"""
1797        query = Eq('student_id',student_id)
1798        cr_catalog = self.course_results
1799        course_results = cr_catalog.evalAdvancedQuery(query)
1800        courses = []
1801        fields = self.schema()
1802        format = '"%(' + ')s","%('.join(fields) + ')s"'
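        # e.g. for fields ['key','score'] this yields the format string '"%(key)s","%(score)s"',
        # producing one double-quoted, comma-separated CSV row per course result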
1803        for brain in course_results:
1804            d = {}
1805            for field in fields:
1806                d[field] = getattr(brain,field,'')
1807            courses.append(format % d)
1808               
1809        if export:
1810            export_file = "%s/export/course_results_removed.csv" % (i_home)
1811            if not os.path.exists(export_file): 
1812                file_handler = open(export_file,"a")
1813                headline = ','.join(fields)
1814                file_handler.write(headline +'\n')
1815            else:
1816                file_handler = open(export_file,"a")
1817            for line in courses:
1818                file_handler.write(line +'\n')
            file_handler.close()
1819
1820        if remove:
1821            for brain in course_results:
1822                key = getattr(brain,'key','')
1823                cr_catalog.deleteRecord(key)
1824       
1825        return courses
1826    ###)   
1827   
1828   
1829
1830InitializeClass(CourseResults)
1831###)
1832
1833class OnlinePaymentsImport(WAeUPTable): ###(
1834
1835    meta_type = 'WAeUP Online Payment Transactions'
1836    name = "online_payments_import"
1837    key = "order_id"
1838    def __init__(self,name=None):
1839        if name ==  None:
1840            name = self.name
1841        WAeUPTable.__init__(self, name)
1842
1843
1844InitializeClass(OnlinePaymentsImport)
1845###)
1846
1847class ReturningImport(WAeUPTable): ###(
1848
1849    meta_type = 'Returning Import Table'
1850    name = "returning_import"
1851    key = "matric_no"
1852    def __init__(self,name=None):
1853        if name ==  None:
1854            name = self.name
1855        WAeUPTable.__init__(self, name)
1856
1857
1858InitializeClass(ReturningImport)
1859###)
1860
1861class ResultsImport(WAeUPTable): ###(
1862
1863    meta_type = 'Results Import Table'
1864    name = "results_import"
1865    key = "key"
1866    def __init__(self,name=None):
1867        if name ==  None:
1868            name = self.name
1869        WAeUPTable.__init__(self, name)
1870
1871
1872InitializeClass(ResultsImport)
1873
1874###)
1875
1876class PaymentsCatalog(WAeUPTable): ###(
1877    security = ClassSecurityInfo()
1878
1879    meta_type = 'WAeUP Payments Catalog'
1880    name = "payments_catalog"
1881    key = "order_id"
1882    def __init__(self,name=None):
1883        if name ==  None:
1884            name = self.name
1885        WAeUPTable.__init__(self, name)
1886
1887
1888    security.declarePrivate('notify_event_listener') ###(
1889    def notify_event_listener(self,event_type,object,infos):
1890        "listen for events"
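        # Only Payment objects are handled: deleting the proxy folder removes the record by
        # order_id, and a 'sys_modify_object' event on the real Payment document updates the
        # record in place (or adds it if no entry with that order_id exists yet); the
        # student_id is taken from the object's relative path.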
1891        if not infos.has_key('rpath'):
1892            return
1893        pt = getattr(object,'portal_type',None)
1894        mt = getattr(object,'meta_type',None)
1895        data = {}
1896        if pt != 'Payment':
1897            return
1898        if event_type == 'sys_del_object' and mt == 'CPS Proxy Folder':
1899            self.deleteRecord(object.getContent().order_id)
1900        if mt == 'CPS Proxy Folder':
1901            return # is handled only for the real object
1902        if event_type not in ('sys_modify_object',):
1903            return
1904        for field in self.schema():
1905            data[field] = getattr(object,field,'')
1906        rpl = infos['rpath'].split('/')
1907        #import pdb;pdb.set_trace()
1908        student_id = rpl[-4]
1909        data['student_id'] = student_id
1910        modified = False
1911        try:
1912            self.modifyRecord(**data)
1913            modified = True
1914        except KeyError:
1915            #logger = logging.getLogger('WAeUPTables.PaymentsCatalog.%s' % self.__name__)
1916            #logger.info("could not modify entry for %(student_id)s with %(order_id)s" % data)
1917            pass
1918        if not modified:
1919            try:
1920                self.addRecord(**data)
1921            except:
1922                logger = logging.getLogger('WAeUPTables.PaymentsCatalog.notify_event_listener')
1923                logger.info("could not add or modify entry for %(student_id)s with %(order_id)s" % data)
1924        ###)
1925
1926
1927    def exportRemoveAllPayments(self,student_id,export=False,remove=False): ###(
1928        """export a student's payments to CSV and/or delete them from the catalog"""
1929        query = Eq('student_id',student_id)
1930        pm_catalog = self.payments_catalog
1931        payments = pm_catalog.evalAdvancedQuery(query)
1932        payments_dic = []
1933        fields = self.schema()
1934        format = '"%(' + ')s","%('.join(fields) + ')s"'
1935        for brain in payments:
1936            d = {}
1937            for field in fields:
1938                d[field] = getattr(brain,field,'')
1939            payments_dic.append(format % d)
1940               
1941        if export:
1942            export_file = "%s/export/payments_removed.csv" % (i_home)
1943            if not os.path.exists(export_file): 
1944                file_handler = open(export_file,"a")
1945                headline = ','.join(fields)
1946                file_handler.write(headline +'\n')
1947            else:
1948                file_handler = open(export_file,"a")
1949            for line in payments_dic:
1950                file_handler.write(line +'\n')
            file_handler.close()
1951
1952        if remove:
1953            for brain in payments:
1954                order_id = getattr(brain,'order_id','')
1955                pm_catalog.deleteRecord(order_id)
1956       
1957        return payments_dic
1958    ###)   
1959
1960InitializeClass(PaymentsCatalog)
1961
1962###)
1963
1964class RemovedStudentIds(WAeUPTable): ###(
1965
1966    meta_type = 'WAeUP Removed StudentIds'
1967    name = "removed_student_ids"
1968    key = "id"
1969    def __init__(self,name=None):
1970        if name ==  None:
1971            name = self.name
1972        WAeUPTable.__init__(self, name)
1973
1974
1975InitializeClass(RemovedStudentIds)
1976
1977###)
1978
1979# BBB:
1980AccomodationTable = AccommodationTable