source: WAeUP_SRP/trunk/WAeUPTables.py @ 3759

Last change on this file since 3759 was 3757, checked in by Henrik Bettermann, 16 years ago

implement simple URL command filter for dumpAll

  • Property svn:keywords set to Id
File size: 66.8 KB
1#-*- mode: python; mode: fold -*-
2# (C) Copyright 2005 AixtraWare <http://aixtraware.de>
3# Author: Joachim Schmitz <js@aixtraware.de>
4#
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License version 2 as published
7# by the Free Software Foundation.
8#
9# This program is distributed in the hope that it will be useful,
10# but WITHOUT ANY WARRANTY; without even the implied warranty of
11# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12# GNU General Public License for more details.
13#
14# You should have received a copy of the GNU General Public License
15# along with this program; if not, write to the Free Software
16# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
17# 02111-1307, USA.
18#
19# $Id: WAeUPTables.py 3757 2008-11-11 11:59:48Z henrik $
20
21from zope.interface import implements
22from Globals import InitializeClass
23from Products.ZCatalog.ZCatalog import ZCatalog
24from Products.ZCatalog.ProgressHandler import ZLogHandler
25from AccessControl import ClassSecurityInfo
26from Products.CMFCore.permissions import ModifyPortalContent
27from Products.CMFCore.utils import getToolByName
28from Products.CMFCore.CatalogTool import CatalogTool
29from Products.CPSSchemas.StorageAdapter import MappingStorageAdapter
30from Products.CPSSchemas.DataStructure import DataStructure
31from Products.CPSSchemas.DataModel import DataModel
32from Products.AdvancedQuery import Eq, Between, Le,In
33import urllib
34import DateTime,time
35import csv,re,os      # os is needed by new_importCSV for the lock file check
36import logging
37import Globals
38p_home = Globals.package_home(globals())
39i_home = Globals.INSTANCE_HOME
40
41ADDING_SHEDULED = "adding_sheduled"
42OBJECT_CREATED = "object_created"
43NOT_OCCUPIED = 'not_occupied'
44
45from interfaces import IWAeUPTable
46
47class AttributeHolder(object):
48    pass
49
50def dict2ob(mapping):
51    ob = AttributeHolder()
52    for key, value in mapping.items():
53        setattr(ob, key, value)
54    return ob
55
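# Illustrative sketch (not called by the portal code): dict2ob turns a plain
# mapping into an attribute holder that ZCatalog can index via catalog_object.
def _example_dict2ob():
    data = {'pin': 'APP-1-0000001', 'student': ''}
    ob = dict2ob(data)
    return ob.pin   # -> 'APP-1-0000001'
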
56class WAeUPTable(ZCatalog): ###(
57
58    implements(IWAeUPTable)
59    security = ClassSecurityInfo()
60    meta_type = None
61
62    def __init__(self,name=None):
63        if name ==  None:
64            name = self.name
65        ZCatalog.__init__(self,name)
66
67    def refreshCatalog(self, clear=0, pghandler=None): ###(
68        """ don't refresh for a normal table """
69
70        if self.REQUEST and self.REQUEST.RESPONSE:
71            self.REQUEST.RESPONSE.redirect(
72              URL1 +
73              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20refresh%20not%20implemented')
74
75###)
76
77    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None): ###(
78        """ clears the whole enchilada """
79
80        #if REQUEST and RESPONSE:
81        #    RESPONSE.redirect(
82        #      URL1 +
83        #      '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Clearing%20disabled')
84
85        self._catalog.clear()
86        if REQUEST and RESPONSE:
87            RESPONSE.redirect(
88              URL1 +
89              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20cleared')
90
91###)
92
93    def record2dict(self,fields,record): ###(
94        d = {}
95        for key in fields:
96            v = getattr(record, key, None)
97            v_dump = v
98            if key == 'sex':
99                if v == True:
100                    v_dump = 'F'
101                elif v == False:
102                    v_dump = 'M'
103                d[key] = v_dump
104            elif v:
105                if key == 'lga':
106                    v_dump = self.portal_vocabularies.local_gov_areas.get(v)
107                    if not v_dump:
108                        v_dump = v
109                elif key == 'aos':
110                    v_dump = self.portal_vocabularies.aos.get(v)
111                d[key] = v_dump
112            else:
113                d[key] = ''
114        return d
115
116###)
117
118    def addRecord(self, **data): ###(
119        # The uid is the value of the table's key field (e.g. "bed" for the accommodation table).
120        uid = data[self.key]
121        res = self.searchResults({"%s" % self.key : uid})
122        if len(res) > 0:
123            raise ValueError("A record with uid %s already exists" % uid)
124        self.catalog_object(dict2ob(data), uid=uid)
125        return uid
126
127###)
128
129    def deleteRecord(self, uid):
130        self.uncatalog_object(uid)
131
132    def getRecordByKey(self,key):
133        if not key:
134            return None
135        res = self.evalAdvancedQuery(Eq(self.key,key))
136        if res:
137            return res[0]
138        return None
139
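    # Usage sketch for the key-based record API above; the key field differs
    # per subclass (e.g. 'pin' for portal_pins) and the values are assumptions:
    #
    #   uid = table.addRecord(pin='APP-1-0000001', student='')
    #   rec = table.getRecordByKey('APP-1-0000001')    # catalog brain or None
    #   table.deleteRecord('APP-1-0000001')
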
140    def searchAndSetRecord(self, **data):
141        raise NotImplementedError
142
143    def modifyRecord(self, record=None, **data): ###(
144        #records = self.searchResults(uid=uid)
145        uid = data[self.key]
146        if record is None:
147            records = self.searchResults({"%s" % self.key : uid})
148            if len(records) > 1:
149                # Can not happen, but anyway...
150                raise ValueError("More than one record with uid %s" % uid)
151            if len(records) == 0:
152                raise KeyError("No record for uid %s" % uid)
153            record = records[0]
154        record_data = {}
155        for field in self.schema() + self.indexes():
156            record_data[field] = getattr(record, field)
157        # Add the updated data:
158        record_data.update(data)
159        self.catalog_object(dict2ob(record_data), uid)
160
161###)
162
163    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
164        if isinstance(name, str):
165            name =  (name,)
166        paths = self._catalog.uids.items()
167        i = 0
168        #import pdb;pdb.set_trace()
169        for p,rid in paths:
170            i += 1
171            metadata = self.getMetadataForRID(rid)
172            record_data = {}
173            for field in name:
174                record_data[field] = metadata.get(field)
175            uid = metadata.get(self.key)
176            self.catalog_object(dict2ob(record_data), uid, idxs=name,
177                                update_metadata=0)
178
179###)
180
181    security.declareProtected(ModifyPortalContent,"exportAllRecords") ###(
182    def exportAllRecords(self):
183        "export a WAeUPTable"
184        #import pdb;pdb.set_trace()
185        fields = [field for field in self.schema()]
186        format = ','.join(['"%%(%s)s"' % fn for fn in fields])
187        csv = []
188        csv.append(','.join(['"%s"' % fn for fn in fields]))
189        for uid in self._catalog.uids:
190            records = self.searchResults({"%s" % self.key : uid})
191            if len(records) > 1:
192                # Can not happen, but anyway...
193                raise ValueError("More than one record with uid %s" % uid)
194            if len(records) == 0:
195                raise KeyError("No record for uid %s" % uid)
196            rec = records[0]
197            csv.append(format % rec)
198        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
199        open("%s/import/%s-%s.csv" % (i_home,self.getId(),current),"w+").write('\n'.join(csv))
200
201###)
202
203    security.declareProtected(ModifyPortalContent,"dumpAll")###(
204    def dumpAll(self,index=None,value=None):
205        """dump all data in the table to a csv"""
206        member = self.portal_membership.getAuthenticatedMember()
207        logger = logging.getLogger('WAeUPTables.WAeUPTable.dumpAll')
208        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
209        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
210        res_list = []
211        lines = []
212        if hasattr(self,"export_keys"):
213            fields = self.export_keys
214        else:
215            fields = []
216            for f in self.schema():
217                fields.append(f)
218        headline = ','.join(fields)
219        out = open(export_file,"wb")
220        out.write(headline +'\n')
221        out.close()
222        out = open(export_file,"a")
223        csv_writer = csv.DictWriter(out,fields,)
224        if index is not None and value is not None:
225            records = self.evalAdvancedQuery(Eq(index,value))
226        else:
227            records = self()
228        nr2export = len(records)
229        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
230        chunk = 2000
231        total = 0
232        start = DateTime.DateTime().timeTime()
233        start_chunk = DateTime.DateTime().timeTime()
234        for record in records:
235            not_all = False
236            d = self.record2dict(fields,record)
237            lines.append(d)
238            total += 1
239            if total and not total % chunk or total == len(records):
240                csv_writer.writerows(lines)
241                anz = len(lines)
242                logger.info("wrote %(anz)d  total written %(total)d" % vars())
243                end_chunk = DateTime.DateTime().timeTime()
244                duration = end_chunk-start_chunk
245                per_record = duration/anz
246                till_now = end_chunk - start
247                average_per_record = till_now/total
248                estimated_end = DateTime.DateTime(start + average_per_record * nr2export)
249                estimated_end = estimated_end.strftime("%H:%M:%S")
250                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
251                start_chunk = DateTime.DateTime().timeTime()
252                lines = []
253        end = DateTime.DateTime().timeTime()
254        logger.info('total time %6.2f m' % ((end-start)/60))
255        import os
256        filename, extension = os.path.splitext(export_file)
257        from subprocess import call
258        msg = "wrote %(total)d records to %(export_file)s" % vars()
259        #try:
260        #    retcode = call('gzip %s' % (export_file),shell=True)
261        #    if retcode == 0:
262        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
263        #except OSError, e:
264        #    retcode = -99
265        #    logger.info("zip failed with %s" % e)
266        logger.info(msg)
267        args = {'portal_status_message': msg}
268        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
269        url = self.REQUEST.get('URL2')
270        return self.REQUEST.RESPONSE.redirect(url)
271    ###)
272
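    # Usage sketch for the simple URL command filter of dumpAll: index and value
    # arrive as request parameters (catalog and field names below are examples):
    #
    #   .../students_catalog/dumpAll?index=faculty&value=science
    #
    # exports only the records matching Eq('faculty','science'); without both
    # parameters the whole table is dumped to $INSTANCE_HOME/export/.
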
273    security.declarePrivate("_import_old") ###(
274    def _import_old(self,filename,schema,layout, mode,logger):
275        "import data from csv"
276        import transaction
277        import random
278        pm = self.portal_membership
279        member = pm.getAuthenticatedMember()
280        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
281        import_fn = "%s/import/%s.csv" % (i_home,filename)
282        imported_fn = "%s/import/%s_imported%s.csv" % (i_home,filename,current)
283        not_imported_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
284        start = True
285        tr_count = 1
286        total_imported = 0
287        total_not_imported = 0
288        total = 0
289        iname =  "%s" % filename
290        not_imported = []
291        imported = []
292        valid_records = []
293        invalid_records = []
294        d = {}
295        d['mode'] = mode
296        d['imported'] = total_imported
297        d['not_imported'] = total_not_imported
298        d['valid_records'] = valid_records
299        d['invalid_records'] = invalid_records
300        d['import_fn'] = import_fn
301        d['imported_fn'] = imported_fn
302        d['not_imported_fn'] = not_imported_fn
303        if schema is None:
304            em = 'No schema specified'
305            logger.error(em)
306            return d
307        if layout is None:
308            em = 'No layout specified'
309            logger.error(em)
310            return d
311        validators = {}
312        for widget in layout.keys():
313            try:
314                validators[widget] = layout[widget].validate
315            except AttributeError:
316                logger.info('%s has no validate attribute' % widget)
317                return d
318        # if mode == 'edit':
319        #     importer = self.importEdit
320        # elif mode == 'add':
321        #     importer = self.importAdd
322        # else:
323        #     importer = None
324        try:
325            items = csv.DictReader(open(import_fn,"rb"),
326                                   dialect="excel",
327                                   skipinitialspace=True)
328        except:
329            em = 'Error reading %s.csv' % filename
330            logger.error(em)
331            return d
332        #import pdb;pdb.set_trace()
333        for item in items:
334            if start:
335                start = False
336                logger.info('%s starts import from %s.csv' % (member,filename))
337                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
338                attrs = csv.reader(open("%s/import/%s.csv" % (i_home,filename),"rb"),
339                                   dialect="excel",
340                                   skipinitialspace=True).next()
341                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
342                diff2schema = set(import_keys).difference(set(schema.keys()))
343                diff2layout = set(import_keys).difference(set(layout.keys()))
344                if diff2layout:
345                    em = "non-ignorable key(s) %s found in heading" % diff2layout
346                    logger.info(em)
347                    return d
348                s = ','.join(['"%s"' % fn for fn in import_keys])
349                open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
350                #s = '"id",' + s
351                open(imported_fn,"a").write(s + '\n')
352                format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
353                format_error = format + ',"%(Error)s"'
354                #format = '"%(id)s",'+ format
355                adapters = [MappingStorageAdapter(schema, item)]
356            dm = DataModel(item, adapters,context=self)
357            ds = DataStructure(data=item,datamodel=dm)
358            error_string = ""
359            #import pdb;pdb.set_trace()
360            for k in import_keys:
361                if not validators[k](ds,mode=mode):
362                    error_string += " %s : %s" % (k,ds.getError(k))
363            # if not error_string and importer:
364            #     item.update(dm)
365            #     item['id'],error = importer(item)
366            #     if error:
367            #         error_string += error
368            if error_string:
369                item['Error'] = error_string
370                invalid_records.append(dm)
371                not_imported.append(format_error % item)
372                total_not_imported += 1
373            else:
374                em = format % item
375                valid_records.append(dm)
376                imported.append(em)
377                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
378                tr_count += 1
379                total_imported += 1
380            total += 1
381        if len(imported) > 0:
382            open(imported_fn,"a").write('\n'.join(imported))
383        if len(not_imported) > 0:
384            open(not_imported_fn,"a").write('\n'.join(not_imported))
385        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
386        d['imported'] = total_imported
387        d['not_imported'] = total_not_imported
388        d['valid_records'] = valid_records
389        d['invalid_records'] = invalid_records
390        d['imported_fn'] = imported_fn
391        d['not_imported_fn'] = not_imported_fn
392        #logger.info(em)
393        return d
394    ###)
395
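    # Sketch of the result dictionary returned by _import_old (consumed by
    # ApplicantsCatalog.importCSV below); the counts are example values only:
    #
    #   {'mode': 'add',
    #    'imported': 120,               # rows that passed widget validation
    #    'not_imported': 3,             # rows appended to *_not_imported*.csv
    #    'valid_records': [...],        # DataModel instances, one per good row
    #    'invalid_records': [...],
    #    'imported_fn': '...', 'not_imported_fn': '...'}
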
396    security.declarePrivate("_import_new") ###(
397    def _import_new(self,csv_items,schema, layout, mode,logger):
398        "import data from a csv.DictReader instance"
399        start = True
400        tr_count = 1
401        total_imported = 0
402        total_not_imported = 0
403        total = 0
404        # (no filename is available here; the caller passes an open csv.DictReader)
405        not_imported = []
406        valid_records = []
407        invalid_records = []
408        duplicate_records = []
409        d = {}
410        d['mode'] = mode
411        d['valid_records'] = valid_records
412        d['invalid_records'] = invalid_records
413        d['duplicate_records'] = duplicate_records
414        # d['import_fn'] = import_fn
415        # d['imported_fn'] = imported_fn
416        # d['not_imported_fn'] = not_imported_fn
417        validators = {}
418        for widget in layout.keys():
419            try:
420                validators[widget] = layout[widget].validate
421            except AttributeError:
422                logger.info('%s has no validate attribute' % widget)
423                return d
424        for item in csv_items:
425            if start:
426                start = False
427                member = self.portal_membership.getAuthenticatedMember()
428                logger.info('%s starts import from csv' % member)
429                attrs = csv_items.fieldnames  # header fields from the DictReader passed in
430                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
431                diff2schema = set(import_keys).difference(set(schema.keys()))
432                diff2layout = set(import_keys).difference(set(layout.keys()))
433                if diff2layout:
434                    em = "non-ignorable key(s) %s found in heading" % diff2layout
435                    logger.info(em)
436                    return d
437                # s = ','.join(['"%s"' % fn for fn in import_keys])
438                # open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
439                # #s = '"id",' + s
440                # open(imported_fn,"a").write(s + '\n')
441                # format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
442                # format_error = format + ',"%(Error)s"'
443                # #format = '"%(id)s",'+ format
444                adapters = [MappingStorageAdapter(schema, item)]
445            dm = DataModel(item, adapters,context=self)
446            ds = DataStructure(data=item,datamodel=dm)
447            error_string = ""
448            for k in import_keys:
449                if not validators[k](ds,mode=mode):
450                    error_string += " %s : %s" % (k,ds.getError(k))
451            if error_string:
452                item['Error'] = error_string
453                #invalid_records.append(dm)
454                invalid_records.append(item)
455                total_not_imported += 1
456            else:
457                # (no csv line is re-assembled here; only the DataModel is kept)
458                valid_records.append(dm)
459                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
460                tr_count += 1
461                total_imported += 1
462            total += 1
463        # if len(imported) > 0:
464        #     open(imported_fn,"a").write('\n'.join(imported))
465        # if len(not_imported) > 0:
466        #     open(not_imported_fn,"a").write('\n'.join(not_imported))
467        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
468        d['imported'] = total_imported
469        d['not_imported'] = total_not_imported
470        d['valid_records'] = valid_records
471        d['invalid_records'] = invalid_records
472        return d
473    ###)
474
475    security.declarePublic("missingValue")###(
476    def missingValue(self):
477        from Missing import MV
478        return MV
479    ###)
480###)
481
482class AccommodationTable(WAeUPTable): ###(
483
484    meta_type = 'WAeUP Accommodation Tool'
485    name = "portal_accommodation"
486    key = "bed"
487    not_occupied = NOT_OCCUPIED
488    def __init__(self,name=None):
489        if name ==  None:
490            name = self.name
491        WAeUPTable.__init__(self, name)
492
493    def searchAndReserveBed(self, student_id,bed_type): ###(
494        logger = logging.getLogger('WAeUPTables.AccommodationTable.searchAndReserveBed')
495        records = self.evalAdvancedQuery(Eq('student',student_id))
496        if len(records) == 1:
497            #return -1,"Student with Id %s already booked bed %s." % (student_id,records[0].bed)
498            logger.info('%s found (reserved) bed %s' % (student_id,records[0].bed))
499            return -1,records[0].bed
500        elif len(records) > 1:
501            logger.info('%s found more than one (reserved) bed' % (student_id))
502            return -3,'more than one bed'
503        query = Eq('bed_type',bed_type) & Eq('student',NOT_OCCUPIED)
504        records = self.evalAdvancedQuery(query,sortSpecs=('sort_id','bed'))
505        if len(records) == 0:
506            logger.info('no bed %s available for %s' % (bed_type,student_id))
507            return -2,"no bed"
508        rec = records[0]
509        self.modifyRecord(bed=rec.bed,student=student_id)
510        logger.info('%s booked bed %s' % (student_id,rec.bed))
511        return 1,rec.bed
512    ###)
513
514
515InitializeClass(AccommodationTable)
516
517###)
518
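# Usage sketch (assumption: 'context' is any content object acquired inside the
# portal); shows how callers interpret searchAndReserveBed's status codes:
def _example_reserve_bed(context, student_id, bed_type):
    code, bed = context.portal_accommodation.searchAndReserveBed(student_id, bed_type)
    #  1 : a bed was free and has now been booked for the student
    # -1 : the student already holds the returned bed
    # -2 : no bed of the requested type is available
    # -3 : more than one bed is already recorded for the student
    return code, bed
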
519class PinTable(WAeUPTable): ###(
520    from ZODB.POSException import ConflictError
521    security = ClassSecurityInfo()
522    meta_type = 'WAeUP Pin Tool'
523    name = "portal_pins"
524    key = 'pin'
525
526    def __init__(self,name=None):
527        if name ==  None:
528            name = self.name
529        WAeUPTable.__init__(self, name)
530
531    security.declareProtected(ModifyPortalContent,"dumpAll")###(
532    def dumpAll(self,include_unused=None):
533        """dump all data in the table to a csv"""
534        member = self.portal_membership.getAuthenticatedMember()
535        logger = logging.getLogger('WAeUPTables.PinTable.dumpAll')
536        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
537        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
538        res_list = []
539        lines = []
540        if hasattr(self,"export_keys"):
541            fields = self.export_keys
542        else:
543            fields = []
544            for f in self.schema():
545                fields.append(f)
546        headline = ','.join(fields)
547        out = open(export_file,"wb")
548        out.write(headline +'\n')
549        out.close()
550        out = open(export_file,"a")
551        csv_writer = csv.DictWriter(out,fields,)
552        if include_unused is not None and str(member) not in ('admin','joachim'):
553            logger.info('%s tries to dump pintable with unused pins' % (member))
554            return
555        if include_unused is not None:
556            records = self()
557        else:
558            records = self.evalAdvancedQuery(~Eq('student',''))
559        nr2export = len(records)
560        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
561        chunk = 2000
562        total = 0
563        start = DateTime.DateTime().timeTime()
564        start_chunk = DateTime.DateTime().timeTime()
565        for record in records:
566            not_all = False
567            d = self.record2dict(fields,record)
568            lines.append(d)
569            total += 1
570            if total and not total % chunk or total == len(records):
571                csv_writer.writerows(lines)
572                anz = len(lines)
573                logger.info("wrote %(anz)d  total written %(total)d" % vars())
574                end_chunk = DateTime.DateTime().timeTime()
575                duration = end_chunk-start_chunk
576                per_record = duration/anz
577                till_now = end_chunk - start
578                average_per_record = till_now/total
579                estimated_end = DateTime.DateTime(start + average_per_record * nr2export)
580                estimated_end = estimated_end.strftime("%H:%M:%S")
581                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
582                start_chunk = DateTime.DateTime().timeTime()
583                lines = []
584        end = DateTime.DateTime().timeTime()
585        logger.info('total time %6.2f m' % ((end-start)/60))
586        import os
587        filename, extension = os.path.splitext(export_file)
588        from subprocess import call
589        msg = "wrote %(total)d records to %(export_file)s" % vars()
590        #try:
591        #    retcode = call('gzip %s' % (export_file),shell=True)
592        #    if retcode == 0:
593        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
594        #except OSError, e:
595        #    retcode = -99
596        #    logger.info("zip failed with %s" % e)
597        logger.info(msg)
598        args = {'portal_status_message': msg}
599        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
600        url = self.REQUEST.get('URL2')
601        return self.REQUEST.RESPONSE.redirect(url)
602    ###)
603
604
605
606    def searchAndSetRecord(self, uid, student_id,prefix):
607        """bind the pin given by uid to student_id and report the outcome"""
608        # The following line must be activated after resetting the
609        # portal_pins table. This avoids duplicate entries and
610        # prevents duplicate payments.
611
612        #student_id = student_id.upper()
613
614        #records = self.searchResults(student = student_id)
615        #if len(records) > 0 and prefix in ('CLR','APP'):
616        #    for r in records:
617        #        if r.pin != uid and r.prefix_batch.startswith(prefix):
618        #            return -2
619        records = self.searchResults({"%s" % self.key : uid})
620        if len(records) > 1:
621            # Can not happen, but anyway...
622            raise ValueError("More than one record with uid %s" % uid)
623        if len(records) == 0:
624            return -1,None
625        record = records[0]
626        if record.student == "":
627            record_data = {}
628            for field in self.schema() + self.indexes():
629                record_data[field] = getattr(record, field)
630            # Add the updated data:
631            record_data['student'] = student_id
632            try:
633                self.catalog_object(dict2ob(record_data), uid)
634                return 1,record
635            except self.ConflictError: # imported at class level above; the bare name is not visible in method scope
636                return 2,record
637        if record.student.upper() != student_id.upper():
638            return 0,record
639        if record.student.upper() == student_id.upper():
640            return 2,record
641        return -3,record
642InitializeClass(PinTable)
643###)
644
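# Usage sketch (assumption: 'context' is acquired inside the portal); shows how
# callers interpret searchAndSetRecord's status codes when binding a pin:
def _example_use_pin(context, pin, student_id, prefix):
    code, record = context.portal_pins.searchAndSetRecord(pin, student_id, prefix)
    # -1 : no such pin in the table
    #  0 : pin is already bound to a different student
    #  1 : pin was unused and is now bound to this student
    #  2 : pin was already bound to this student (or a write conflict occurred)
    return code, record
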
645class PumeResultsTable(WAeUPTable): ###(
646
647    meta_type = 'WAeUP PumeResults Tool'
648    name = "portal_pumeresults"
649    key = "jamb_reg_no"
650    def __init__(self,name=None):
651        if name ==  None:
652            name = self.name
653        WAeUPTable.__init__(self, name)
654
655
656InitializeClass(PumeResultsTable)
657
658###)
659
660class ApplicantsCatalog(WAeUPTable): ###(
661
662    meta_type = 'WAeUP Applicants Catalog'
663    name = "applicants_catalog"
664    key = "reg_no"
665    security = ClassSecurityInfo()
666    #export_keys = (
667    #               "reg_no",
668    #               "status",
669    #               "lastname",
670    #               "sex",
671    #               "date_of_birth",
672    #               "lga",
673    #               "email",
674    #               "phone",
675    #               "passport",
676    #               "entry_mode",
677    #               "pin",
678    #               "screening_type",
679    #               "registration_date",
680    #               "testdate",
681    #               "application_date",
682    #               "screening_date",
683    #               "faculty",
684    #               "department",
685    #               "course1",
686    #               "course2",
687    #               "course3",
688    #               "eng_score",
689    #               "subj1",
690    #               "subj1score",
691    #               "subj2",
692    #               "subj2score",
693    #               "subj3",
694    #               "subj3score",
695    #               "aggregate",
696    #               "course_admitted",
697    #               )
698
699    def __init__(self,name=None):
700        if name ==  None:
701            name = self.name
702        WAeUPTable.__init__(self, name)
703
704    security.declareProtected(ModifyPortalContent,"new_importCSV")###(
705    def new_importCSV(self,filename="JAMB_data",
706                  schema_id="application",
707                  layout_id="import_application",
708                  mode='add'):
709        """ import JAMB data """
710        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
711        pm = self.portal_membership
712        member = pm.getAuthenticatedMember()
713        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
714        lock_fn = "%s/import/%s_import_lock" % (i_home,filename)
715        import_fn = "%s/import/%s.csv" % (i_home,filename)
716        if mode not in ('add','edit'):
717            logger.info("invalid mode: %s" % mode)
718        if os.path.exists(lock_fn):
719            logger.info("import of %(import_fn)s already in progress" % vars())
720            return
721        lock_file = open(lock_fn,"w")
722        lock_file.write("%(current)s \n" % vars())
723        lock_file.close()
724        invalid_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
725        duplicate_fn = "%s/import/%s_duplicates%s.csv" % (i_home,filename,current)
726        stool = getToolByName(self, 'portal_schemas')
727        ltool = getToolByName(self, 'portal_layouts')
728        schema = stool._getOb(schema_id)
729        if schema is None:
730            em = 'No such schema %s' % schema_id
731            logger.error(em)
732            return
733        for postfix in ('_import',''):
734            layout_name = "%(layout_id)s%(postfix)s" % vars()
735            if hasattr(ltool,layout_name):
736                break
737        layout = ltool._getOb(layout_name)
738        if layout is None:
739            em = 'No such layout %s' % layout_id
740            logger.error(em)
741            return
742        try:
743            csv_file = csv.DictReader(open(import_fn,"rb"))
744        except:
745            em = 'Error reading %s.csv' % filename
746            logger.error(em)
747            return
748        d = self._import_new(csv_file,schema,layout,mode,logger)
749        imported = []
750        edited = []
751        duplicates = []
752        not_found = []
753        if len(d['valid_records']) > 0:
754            for record in d['valid_records']:
755                #import pdb;pdb.set_trace()
756                if mode == "add":
757                    try:
758                        self.addRecord(**dict(record.items()))
759                        imported.append(dict(record.items()))
760                        logger.info("added %s" % record.items())
761                    except ValueError:
762                        duplicates.append(dict(record.items()))
763                        logger.info("duplicate %s" % record.items())
764                elif mode == "edit":
765                    try:
766                        self.modifyRecord(**dict(record.items()))
767                        edited.append(dict(record.items()))
768                        logger.info("edited %s" % record.items())
769                    except KeyError:
770                        not_found.append(dict(record.items()))
771                        logger.info("not found %s" % record.items())
772        invalid = d['invalid_records']
773        for itype in ("imported","edited","not_found","duplicates","invalid"):
774            outlist = locals()[itype]
775            if len(outlist):
776                field_names = outlist[0].keys()
777                # output path is an assumption, following the naming pattern used above
778                out_fn = "%s/import/%s_%s%s.csv" % (i_home,filename,itype,current)
779                csv_writer = csv.DictWriter(open(out_fn,'w'),field_names)
780                csv_writer.writerow(dict(zip(field_names,field_names)))  # header row
781                csv_writer.writerows(outlist)
782                logger.info("wrote %d %s records to %s" % (len(outlist),itype,out_fn))
783###)
784
785    security.declareProtected(ModifyPortalContent,"importCSV")###(
786    def importCSV(self,filename="JAMB_data",
787                  schema_id="application",
788                  layout_id="application_pce",
789                  mode='add'):
790        """ import JAMB data """
791        stool = getToolByName(self, 'portal_schemas')
792        ltool = getToolByName(self, 'portal_layouts')
793        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
794        schema = stool._getOb(schema_id)
795        if schema is None:
796            em = 'No such schema %s' % schema_id
797            logger.error(em)
798            return
799        layout = ltool._getOb(layout_id)
800        if layout is None:
801            em = 'No such layout %s' % layout_id
802            logger.error(em)
803            return
804        d = self._import_old(filename,schema,layout,mode,logger)
805        if len(d['valid_records']) > 0:
806            for record in d['valid_records']:
807                #import pdb;pdb.set_trace()
808                if mode == "add":
809                    self.addRecord(**dict(record.items()))
810                    logger.info("added %s" % record.items())
811                elif mode == "edit":
812                    self.modifyRecord(**dict(record.items()))
813                    logger.info("edited %s" % record.items())
814                else:
815                    logger.info("invalid mode: %s" % mode)
816        logger.info("%(mode)sed %(imported)d records, invalid written to %(not_imported_fn)s" % d)
817    ###)
818
819InitializeClass(ApplicantsCatalog)
820
821###)
822
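# Usage sketch: importCSV reads $INSTANCE_HOME/import/<filename>.csv, validates
# every row against the schema and layout widgets and then adds or edits
# catalog records; the arguments below are the method's own defaults:
def _example_import_applicants(context):
    context.applicants_catalog.importCSV(filename="JAMB_data",
                                         schema_id="application",
                                         layout_id="application_pce",
                                         mode='add')
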
823class StudentsCatalog(WAeUPTable): ###(
824    security = ClassSecurityInfo()
825
826    meta_type = 'WAeUP Students Catalog'
827    name = "students_catalog"
828    key = "id"
829    affected_types = {   ###(
830                      'StudentApplication':
831                      {'id': 'application',
832                       'fields':
833                       ('jamb_reg_no',
834                        'entry_mode',
835                        #'entry_level',
836                        'entry_session',
837                       )
838                      },
839                      'StudentClearance':
840                      {'id': 'clearance',
841                       'fields':
842                       ('matric_no',
843                        'lga',
844                       )
845                      },
846                      'StudentPersonal':
847                      {'id': 'personal',
848                       'fields':
849                       ('name',
850                        'sex',
851                        'perm_address',
852                        'email',
853                        'phone',
854                       )
855                      },
856                      'StudentStudyCourse':
857                      {'id': 'study_course',
858                       'fields':
859                       ('course', # study_course
860                        'faculty', # from certificate
861                        'department', # from certificate
862                        'end_level', # from certificate
863                        'level', # current_level
864                        'mode',  # current_mode
865                        'session', # current_session
866                        'verdict', # current_verdict
867                       )
868                      },
869                     }
870    ###)
871
872    def __init__(self,name=None):
873        if name ==  None:
874            name = self.name
875        WAeUPTable.__init__(self, name)
876        return
877
878    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None):
879        """ clears the whole enchilada """
880        self._catalog.clear()
881
882        if REQUEST and RESPONSE:
883            RESPONSE.redirect(
884              URL1 +
885              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Cleared')
886
887    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
888        """ clear the catalog, then re-index everything """
889
890        elapse = time.time()
891        c_elapse = time.clock()
892
893        pgthreshold = self._getProgressThreshold()
894        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
895        self.refreshCatalog(clear=1, pghandler=handler)
896
897        elapse = time.time() - elapse
898        c_elapse = time.clock() - c_elapse
899
900        RESPONSE.redirect(
901            URL1 +
902            '/manage_catalogAdvanced?manage_tabs_message=' +
903            urllib.quote('Catalog Updated \n'
904                         'Total time: %s\n'
905                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
906    ###)
907
908    def fill_certificates_dict(self): ###(
909        "return certificate data in a dict"
910        certificates_brains = self.portal_catalog(portal_type ='Certificate')
911        d = {}
912        for cb in certificates_brains:
913            certificate_doc = cb.getObject().getContent()
914            cb_path = cb.getPath().split('/')
915            ld = {}
916            ld['faculty'] = cb_path[-4]
917            ld['department'] = cb_path[-3]
918            ld['end_level'] = getattr(certificate_doc,'end_level','999')
919            ld['study_mode'] = getattr(certificate_doc,'study_mode','')
920            d[cb.getId] = ld
921        return d
922    ###)
923
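    # Sketch of the mapping built above; keys are certificate ids, faculty and
    # department come from the certificate's path (the ids below are examples):
    #
    #   {'BSCPHY': {'faculty': 'science', 'department': 'physics',
    #               'end_level': '500', 'study_mode': 'ug_ft'}, ...}
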
924    def get_from_doc_department(self,doc,cached_data={}): ###(
925        "return the student's department"
926        if doc is None:
927            return None
928        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
929            return self._v_certificates[doc.study_course]['department']
930        certificate_res = self.portal_catalog(id = doc.study_course)
931        if len(certificate_res) != 1:
932            return None
933        return certificate_res[0].getPath().split('/')[-3]
934
935    def get_from_doc_faculty(self,doc,cached_data={}):
936        "return the student's faculty"
937        if doc is None:
938            return None
939        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
940            return self._v_certificates[doc.study_course]['faculty']
941        certificate_res = self.portal_catalog(id = doc.study_course)
942        if len(certificate_res) != 1:
943            return None
944        return certificate_res[0].getPath().split('/')[-4]
945
946    def get_from_doc_end_level(self,doc,cached_data={}):
947        "return the student's end_level"
948        if doc is None:
949            return None
950        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
951            return self._v_certificates[doc.study_course]['end_level']
952        certificate_res = self.portal_catalog(id = doc.study_course)
953        if len(certificate_res) != 1:
954            return None
955        return getattr(certificate_res[0].getObject().getContent(),'end_level','unknown')
956
957    def get_from_doc_level(self,doc,cached_data={}):
958        "return the student's level"
959        if doc is None:
960            return None
961        return getattr(doc,'current_level',None)
962
963    def get_from_doc_mode(self,doc,cached_data={}):
964        "return the student's mode"
965        if doc is None:
966            return None
967        cm = getattr(doc,'current_mode',None)
968        return cm
969
970
971    def get_from_doc_session(self,doc,cached_data={}):
972        "return the student's current_session"
973        if doc is None:
974            return None
975        return getattr(doc,'current_session',None)
976
977    def get_from_doc_entry_session(self,doc,cached_data={}):
978        "return the student's entry_session"
979        if doc is None:
980            return None
981        es = getattr(doc,'entry_session',None)
982        if es is not None and len(es) == 2:
983            return es
984        elif es is not None and len(es) == 9:
985            return es[2:4]
986        try:
987            digit = int(doc.jamb_reg_no[0])
988        except:
989            return "-1"
990        if digit < 8:
991            return "0%c" % doc.jamb_reg_no[0]
992        return "9%c" % doc.jamb_reg_no[0]
993
994    def get_from_doc_course(self,doc,cached_data={}):
995        "return the student's study_course"
996        if doc is None:
997            return None
998        return getattr(doc,'study_course',None)
999
1000    def get_from_doc_name(self,doc,cached_data={}):
1001        "return the student's name from the personal data"
1002        if doc is None:
1003            return None
1004        return "%s %s %s" % (doc.firstname,doc.middlename,doc.lastname)
1005
1006    def get_from_doc_verdict(self,doc,cached_data={}):
1007        "return the student's current verdict"
1008        if doc is None:
1009            return None
1010        return getattr(doc,'current_verdict',None)
1011    ###)
1012
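    # The get_from_doc_<field> accessors above are looked up dynamically by
    # reindexIndex and refreshCatalog below; a minimal sketch of that dispatch:
    #
    #   getter = getattr(self, 'get_from_doc_%s' % field, None)
    #   if getter is not None:
    #       data[field] = getter(doc, cached_data=cached_data)
    #   else:
    #       data[field] = getattr(doc, field, None)
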
1013    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1014        if isinstance(name, str):
1015            name = (name,)
1016        reindextypes = {}
1017        reindex_special = []
1018        for n in name:
1019            if n in ("review_state","registered_courses"):
1020                reindex_special.append(n)
1021            else:
1022                for pt in self.affected_types.keys():
1023                    if n in self.affected_types[pt]['fields']:
1024                        if reindextypes.has_key(pt):
1025                            reindextypes[pt].append(n)
1026                        else:
1027                            reindextypes[pt]= [n]
1028                        break
1029        cached_data = {}
1030        if set(name).intersection(set(('faculty','department','end_level'))):
1031            cached_data = self.fill_certificates_dict()
1032        students = self.portal_catalog(portal_type="Student")
1033        if hasattr(self,'portal_catalog_real'):
1034            aq_portal = self.portal_catalog_real.evalAdvancedQuery
1035        else:
1036            aq_portal = self.portal_catalog.evalAdvancedQuery
1037        num_objects = len(students)
1038        if pghandler:
1039            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1040        noattr = set(('StudentClearance','StudentPersonal')) & set(reindextypes.keys())
1041        #import pdb;pdb.set_trace()
1042        for i in xrange(num_objects):
1043            if pghandler: pghandler.report(i)
1044            student_brain = students[i]
1045            student_object = student_brain.getObject()
1046            # query = Eq('path',student_brain.getPath())
1047            # sub_brains_list = aq_portal(query)
1048            # sub_brains = {}
1049            # for sub_brain in sub_brains_list:
1050            #     sub_brains[sub_brain.portal_type] = sub_brain
1051            # student_path = student_brain.getPath()
1052            data = {}
1053            modified = False
1054            sid = data['id'] = student_brain.getId
1055            if reindex_special and 'review_state' in reindex_special:
1056                modified = True
1057                data['review_state'] = student_object.portal_workflow.getInfoFor(student_object,'review_state',None)
1058            sub_objects = False
1059            for pt in reindextypes.keys():
1060                modified = True
1061                try:
1062                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1063                    #doc = sub_brains[pt].getObject().getContent()
1064                    # path = "%s/%s" % (student_path,self.affected_types[pt]['id'])
1065                    # doc = self.unrestrictedTraverse(path).getContent()
1066                    sub_objects = True
1067                except:
1068                    continue
1069                for field in set(name).intersection(self.affected_types[pt]['fields']):
1070                    if hasattr(self,'get_from_doc_%s' % field):
1071                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
1072                                                                              cached_data=cached_data)
1073                    else:
1074                        data[field] = getattr(doc,field)
1075            if not sub_objects and noattr:
1076                import_res = self.returning_import(id = sid)
1077                if not import_res:
1078                    continue
1079                import_record = import_res[0]
1080                data['matric_no'] = import_record.matric_no
1081                data['sex'] = import_record.Sex == 'F'
1082                data['name'] = "%s %s %s" % (import_record.Firstname,
1083                                             import_record.Middlename,
1084                                             import_record.Lastname)
1085                data['jamb_reg_no'] = import_record.Entryregno
1086            #if reindex_special and 'registered_courses' in reindex_special:
1087            #    try:
1088            #        study_course = getattr(student_object,"study_course")
1089            #        level_ids = study_course.objectIds()
1090            #    except:
1091            #        continue
1092            #    if not level_ids:
1093            #        continue
1094            #    modified = True
1095            #    level_ids.sort()
1096            #    course_ids = getattr(study_course,level_ids[-1]).objectIds()
1097            #    courses = []
1098            #    for c in course_ids:
1099            #        if c.endswith('_co'):
1100            #            courses.append(c[:-3])
1101            #        else:
1102            #            courses.append(c)
1103            #    data['registered_courses'] = courses
1104            if modified:
1105                self.modifyRecord(**data)
1106        if pghandler: pghandler.finish()
1107    ###)
1108
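    # Usage sketch for reindexIndex above: re-index a single denormalised field
    # for all students after its source document changed (field name is an example):
    #
    #   context.students_catalog.reindexIndex('level', REQUEST)
    #
    # Only the student sub-objects listed in affected_types for that field are
    # loaded; 'review_state' is handled specially via the workflow tool.
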
1109    def refreshCatalog(self, clear=0, pghandler=None): ###(
1110        """ re-index everything we can find """
1111        students_folder = self.portal_url.getPortalObject().campus.students
1112        if clear:
1113            self._catalog.clear()
1114        students = self.portal_catalog(portal_type="Student")
1115        num_objects = len(students)
1116        cached_data = self.fill_certificates_dict()
1117        if pghandler:
1118            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1119        for i in xrange(num_objects):
1120            if pghandler: pghandler.report(i)
1121            student_brain = students[i]
1122            spath = student_brain.getPath()
1123            student_object = student_brain.getObject()
1124            data = {}
1125            sid = data['id'] = student_brain.getId
1126            #data['review_state'] = student_brain.review_state
1127            data['review_state'] = student_object.portal_workflow.getInfoFor(student_object,'review_state',None)
1128            sub_objects = False
1129            for pt in self.affected_types.keys():
1130                modified = True
1131                try:
1132                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1133                    sub_objects = True
1134                except:
1135                    #from pdb import set_trace;set_trace()
1136                    continue
1137                for field in self.affected_types[pt]['fields']:
1138                    if hasattr(self,'get_from_doc_%s' % field):
1139                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
1140                                                                              cached_data=cached_data)
1141                    else:
1142                        data[field] = getattr(doc,field,None)
1143            if not sub_objects:
1144                import_res = self.returning_import(id = sid)
1145                if not import_res:
1146                    continue
1147                import_record = import_res[0]
1148                data['matric_no'] = import_record.matric_no
1149                data['sex'] = import_record.Sex == 'F'
1150                data['name'] = "%s %s %s" % (import_record.Firstname,
1151                                             import_record.Middlename,
1152                                             import_record.Lastname)
1153                data['jamb_reg_no'] = import_record.Entryregno
1154            self.addRecord(**data)
1155        if pghandler: pghandler.finish()
1156    ###)
1157
1158    security.declarePrivate('notify_event_listener') ###(
1159    def notify_event_listener(self,event_type,object,infos):
1160        "listen for events"
1161        if not infos.has_key('rpath'):
1162            return
1163        pt = getattr(object,'portal_type',None)
1164        mt = getattr(object,'meta_type',None)
1165        students_catalog = self
1166        data = {}
1167        if pt == 'Student' and\
1168           mt == 'CPS Proxy Folder' and\
1169           event_type.startswith('workflow'):
1170            data['id'] = object.getId()
1171            data['review_state'] = self.portal_workflow.getInfoFor(object,'review_state',None)
1172            students_catalog.modifyRecord(**data)
1173            return
1174        rpl = infos['rpath'].split('/')
1175        if pt == 'Student' and mt == 'CPS Proxy Folder':
1176            student_id = object.id
1177            if event_type == "sys_add_object":
1178                try:
1179                    self.addRecord(id = student_id)
1180                except ValueError:
1181                    pass
1182                return
1183            elif event_type == 'sys_del_object':
1184                self.deleteRecord(student_id)
1185        if pt not in self.affected_types.keys():
1186            return
1187        if event_type not in ('sys_modify_object',):
1188            return
1189        if mt == 'CPS Proxy Folder':
1190            return
1191        if not hasattr(self,'_v_certificates'):
1192            self._v_certificates = self.fill_certificates_dict()
1193        for field in self.affected_types[pt]['fields']:
1194            if hasattr(self,'get_from_doc_%s' % field):
1195                data[field] = getattr(self,'get_from_doc_%s' % field)(object)
1196            else:
1197                data[field] = getattr(object,field)
1198        data['id'] = rpl[2]
1199        self.modifyRecord(**data)
1200    ###)
1201
1202
1203InitializeClass(StudentsCatalog)
1204
1205###)
1206
1207class CertificatesCatalog(WAeUPTable): ###(
1208    security = ClassSecurityInfo()
1209
1210    meta_type = 'WAeUP Certificates Catalog'
1211    name =  "certificates_catalog"
1212    key = "code"
1213    def __init__(self,name=None):
1214        if name ==  None:
1215            name =  self.name
1216        WAeUPTable.__init__(self, name)
1217
1218    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
1219        """ clear the catalog, then re-index everything """
1220
1221        elapse = time.time()
1222        c_elapse = time.clock()
1223
1224        pgthreshold = self._getProgressThreshold()
1225        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
1226        self.refreshCatalog(clear=1, pghandler=handler)
1227
1228        elapse = time.time() - elapse
1229        c_elapse = time.clock() - c_elapse
1230
1231        RESPONSE.redirect(
1232            URL1 +
1233            '/manage_catalogAdvanced?manage_tabs_message=' +
1234            urllib.quote('Catalog Updated \n'
1235                         'Total time: %s\n'
1236                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
1237    ###)
1238
1239    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1240        if isinstance(name, str):
1241            name = (name,)
1242        certificates = self.portal_catalog(portal_type="Certificate")
1243        num_objects = len(certificates)
1244        if pghandler:
1245            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1246        for i in xrange(num_objects):
1247            if pghandler: pghandler.report(i)
1248            certificate_brain = certificates[i]
1249            certificate_object = certificate_brain.getObject()
1250            pl = certificate_brain.getPath().split('/')
1251            data = {}
1252            cid = data[self.key] = certificate_brain.getId
1253            data['faculty'] = pl[-4]
1254            data['department'] = pl[-3]
1255            doc = certificate_object.getContent()
1256            for field in name:
1257                if field not in (self.key,'faculty','department'):
1258                    data[field] = getattr(doc,field)
1259            self.modifyRecord(**data)
1260        if pghandler: pghandler.finish()
1261    ###)
1262
1263    def refreshCatalog(self, clear=0, pghandler=None): ###(
1264        """ re-index everything we can find """
1265        if clear:
1266            self._catalog.clear()
1267        certificates = self.portal_catalog(portal_type="Certificate")
1268        num_objects = len(certificates)
1269        if pghandler:
1270            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1271        #from pdb import set_trace;set_trace()
1272        for i in xrange(num_objects):
1273            if pghandler: pghandler.report(i)
1274            certificate_brain = certificates[i]
1275            certificate_doc = certificate_brain.getObject().getContent()
1276            pl = certificate_brain.getPath().split('/')
1277            data = {}
1278            for field in self.schema():
1279                data[field] = getattr(certificate_doc,field,None)
1280            data[self.key] = certificate_brain.getId
1281            ai = pl.index('academics')
1282            data['faculty'] = pl[ai +1]
1283            data['department'] = pl[ai +2]
1284            if clear:
1285                self.addRecord(**data)
1286            else:
1287                self.modifyRecord(**data)
1288        if pghandler: pghandler.finish()
1289    ###)
1290
1291    security.declarePrivate('notify_event_listener') ###(
1292    def notify_event_listener(self,event_type,object,infos):
1293        "listen for events"
1294        if not infos.has_key('rpath'):
1295            return
1296        pt = getattr(object,'portal_type',None)
1297        mt = getattr(object,'meta_type',None)
1298        if pt != 'Certificate':
1299            return
1300        data = {}
1301        rpl = infos['rpath'].split('/')
1302        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
1303            return
1304        certificate_id = object.getId()
1305        data[self.key] = certificate_id
1306        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
1307            try:
1308                self.addRecord(**data)
1309            except ValueError:
1310                return
1311            certificate_id = object.getId()
1312            doc = object.getContent()
1313            if doc is None:
1314                return
1315            for field in self.schema():
1316                data[field] = getattr(doc,field,None)
1317            data[self.key] = certificate_id
1318            ai = rpl.index('academics')
1319            data['faculty'] = rpl[ai +1]
1320            data['department'] = rpl[ai +2]
1321            self.modifyRecord(**data)
1322            return
1323        if event_type == "sys_del_object":
1324            self.deleteRecord(certificate_id)
1325            return
1326        if event_type == "sys_modify_object" and mt == 'Certificate':
1327            #from pdb import set_trace;set_trace()
1328            for field in self.schema():
1329                data[field] = getattr(object,field,None)
1330            certificate_id = object.aq_parent.getId()
1331            data[self.key] = certificate_id
1332            ai = rpl.index('academics')
1333            data['faculty'] = rpl[ai +1]
1334            data['department'] = rpl[ai +2]
1335            self.modifyRecord(**data)
1336    ###)
1337
1338
1339InitializeClass(CertificatesCatalog)
1340###)
1341
1342class CoursesCatalog(WAeUPTable): ###(
1343    security = ClassSecurityInfo()
1344
1345    meta_type = 'WAeUP Courses Catalog'
1346    name =  "courses_catalog"
1347    key = "code"
1348    def __init__(self,name=None):
1349        if name ==  None:
1350            name =  self.name
1351        WAeUPTable.__init__(self, name)
1352
1353    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
1354        """ clear the catalog, then re-index everything """
1355
1356        elapse = time.time()
1357        c_elapse = time.clock()
1358
1359        pgthreshold = self._getProgressThreshold()
1360        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
1361        self.refreshCatalog(clear=1, pghandler=handler)
1362
1363        elapse = time.time() - elapse
1364        c_elapse = time.clock() - c_elapse
1365
1366        RESPONSE.redirect(
1367            URL1 +
1368            '/manage_catalogAdvanced?manage_tabs_message=' +
1369            urllib.quote('Catalog Updated \n'
1370                         'Total time: %s\n'
1371                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
1372    ###)
1373
1374    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
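            # Re-read only the given field name(s) from every Course document;
            # the record key comes from the brain id and faculty/department are
            # taken from fixed positions in the object path before the record
            # is updated with modifyRecord.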
1375        if isinstance(name, str):
1376            name = (name,)
1377        courses = self.portal_catalog(portal_type="Course")
1378        num_objects = len(courses)
1379        if pghandler:
1380            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1381        for i in xrange(num_objects):
1382            if pghandler: pghandler.report(i)
1383            course_brain = courses[i]
1384            course_object = course_brain.getObject()
1385            pl = course_brain.getPath().split('/')
1386            data = {}
1387            cid = data[self.key] = course_brain.getId
1388            data['faculty'] = pl[-4]
1389            data['department'] = pl[-3]
1390            doc = course_object.getContent()
1391            for field in name:
1392                if field not in (self.key,'faculty','department'):
1393                    data[field] = getattr(doc,field)
1394            self.modifyRecord(**data)
1395        if pghandler: pghandler.finish()
1396    ###)
1397
1398    def refreshCatalog(self, clear=0, pghandler=None): ###(
1399        """ re-index everything we can find """
1400        if clear:
1401            self._catalog.clear()
1402        courses = self.portal_catalog(portal_type="Course")
1403        num_objects = len(courses)
1404        if pghandler:
1405            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1406        #from pdb import set_trace;set_trace()
1407        for i in xrange(num_objects):
1408            if pghandler: pghandler.report(i)
1409            course_brain = courses[i]
1410            course_doc = course_brain.getObject().getContent()
1411            pl = course_brain.getPath().split('/')
1412            data = {}
1413            for field in self.schema():
1414                data[field] = getattr(course_doc,field,None)
1415            data[self.key] = course_brain.getId
1416            ai = pl.index('academics')
1417            data['faculty'] = pl[ai +1]
1418            data['department'] = pl[ai +2]
1419            if clear:
1420                self.addRecord(**data)
1421            else:
1422                self.modifyRecord(**data)
1423        if pghandler: pghandler.finish()
1424    ###)
1425
1426    security.declarePrivate('notify_event_listener') ###(
1427    def notify_event_listener(self,event_type,object,infos):
1428        "listen for events"
1429        if not infos.has_key('rpath'):
1430            return
1431        pt = getattr(object,'portal_type',None)
1432        mt = getattr(object,'meta_type',None)
1433        if pt != 'Course':
1434            return
1435        data = {}
1436        rpl = infos['rpath'].split('/')
1437        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
1438            return
1439        course_id = object.getId()
1440        data[self.key] = course_id
1441        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
1442            try:
1443                self.addRecord(**data)
1444            except ValueError:
1445                return
1446            course_id = object.getId()
1447            doc = object.getContent()
1448            if doc is None:
1449                return
1450            for field in self.schema():
1451                data[field] = getattr(doc,field,None)
1452            data[self.key] = course_id
1453            ai = rpl.index('academics')
1454            data['faculty'] = rpl[ai +1]
1455            data['department'] = rpl[ai +2]
1456            self.modifyRecord(**data)
1457            return
1458        if event_type == "sys_del_object":
1459            self.deleteRecord(course_id)
1460            return
1461        if event_type == "sys_modify_object" and mt == 'Course':
1462            #from pdb import set_trace;set_trace()
1463            for field in self.schema():
1464                data[field] = getattr(object,field,None)
1465            course_id = object.aq_parent.getId()
1466            data[self.key] = course_id
1467            ai = rpl.index('academics')
1468            data['faculty'] = rpl[ai +1]
1469            data['department'] = rpl[ai +2]
1470            self.modifyRecord(**data)
1471    ###)
1472
1473
1474InitializeClass(CoursesCatalog)
1475###)
1476
1477class CourseResults(WAeUPTable): ###(
1478    security = ClassSecurityInfo()
1479
1480    meta_type = 'WAeUP Results Catalog'
1481    name = "course_results"
1482    key = "key" # composed as student_id|level_id|course_id
1483    def __init__(self,name=None):
1484        if name ==  None:
1485            name = self.name
1486        WAeUPTable.__init__(self, name)
1487        self._queue = []
1488
1489    def addMultipleRecords(self, records): ###(
1490        """add many records"""
1491        existing_uids = []
1492        for data in records:
1493            uid = "%(student_id)s|%(level_id)s|%(course_id)s" % data
1494            data[self.key] = uid
1495            query = Eq(self.key, uid)
1496            res = self.course_results.evalAdvancedQuery(query)
1497            if len(res) > 0:
1498                rec = res[0]
1499                equal = True
1500                for attr in ('student_id','level_id','course_id'):
1501                    if getattr(rec,attr,'') != data[attr]:
1502                        equal = False
1503                        break
1504                if equal:
1505                    existing_uids.append(uid)
1506                    continue
1507            self.catalog_object(dict2ob(data), uid=uid)
1508        return existing_uids
1509    ###)
1510
1511    def deleteResultsHere(self,level_id,student_id): ###(
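            # Delete every course result record of the given student at the
            # given level.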
1512        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1513        course_results = self.course_results.evalAdvancedQuery(query)
1514        #import pdb;pdb.set_trace()
1515        for result in course_results:
1516            self.deleteRecord(result.key)
1517    ###)
1518
1519    def moveResultsHere(self,level,student_id): ###(
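            # Move the course result documents stored inside a level folder
            # into this catalog: ids ending in '_co' are flagged as
            # carry-overs, results already cataloged for this student and
            # level are skipped, and the level's objects are deleted
            # afterwards.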
1520        #import pdb;pdb.set_trace()
1521        level_id = level.getId()
1522        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1523        course_results = self.course_results.evalAdvancedQuery(query)
1524        existing_courses = [cr.code for cr in course_results]
1525        to_delete = []
1526        for code,obj in level.objectItems():
1527            to_delete.append(code)
1528            carry_over = False
1529            if code.endswith('_co'):
1530                carry_over = True
1531                code  = code[:-3]
1532            if code in existing_courses:
1533                continue
1534            course_result_doc = obj.getContent()
1535            data = {}
1536            course_id = code
1537            for field in self.schema():
1538                data[field] = getattr(course_result_doc,field,'')
1539            data['key'] = key = "%(student_id)s|%(level_id)s|%(course_id)s" % vars()
1540            data['student_id'] = student_id
1541            data['level_id'] = level_id
1542            session_id = self.getLevelSession(level.getContent(),student_id,level_id)
1543            data['session_id'] = session_id
1544            #data['queue_status'] = OBJECT_CREATED
1545            data['code'] = course_id
1546            data['carry_over'] = carry_over
1547            self.catalog_object(dict2ob(data), uid=key)
1548        level.manage_delObjects(to_delete)
1549    ###)
1550
1551    def getCourses(self,student_id,level_id): ###(
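            # Collect the student's course results for one level. Returns the
            # total credits, the credit-weighted sum of grade weights (not yet
            # divided by total_credits), the sorted carry-over list and three
            # lists of regular courses grouped by semester ('1', '2', other).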
1552        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1553        course_results = self.course_results.evalAdvancedQuery(query)
1554        carry_overs = []
1555        normal1 = []
1556        normal2 = []
1557        normal3 = []
1558        total_credits = 0
1559        gpa = 0
1560        for brain in course_results:
1561            d = {}
1562
1563            for field in self.schema():
1564                d[field] = getattr(brain,field,None)
1565                if repr(d[field]) == 'Missing.Value':
1566                    d[field] = ''
1567            d['weight'] = ''
1568            d['grade'] = ''
1569            d['score'] = ''
1570
1571            if str(brain.credits).isdigit():
1572                credits = int(brain.credits)
1573                total_credits += credits
1574                score = getattr(brain,'score',0)
1575                if score and str(score).isdigit() and int(score) > 0:
1576                    score = int(score)
1577                    grade,weight = self.getGradesFromScore(score,'')
1578                    gpa += weight * credits
1579                    d['weight'] = weight
1580                    d['grade'] = grade
1581                    d['score'] = score
1582
1583            #if str(brain.ca1).isdigit() and str(brain.ca2).isdigit() and str(brain.exam).isdigit():
1584            #    d['score_calc'] = int(brain.ca1) + int(brain.ca2) + int(brain.exam)
1585            #else:
1586            #    d['score_calc'] = ''
1587            try:
1588                d['score_calc'] = float(brain.ca1) + float(brain.ca2) + float(brain.exam)
1589            except:
1590                d['score_calc'] = ''
1591
1592            if d['score_calc']:
1593                grade,weight = self.getGradesFromScore(d['score_calc'],level_id)
1594                d['grade'] = grade
1595
1596            d['coe'] = ''
1597            if brain.core_or_elective:
1598                d['coe'] = 'Core'
1599            elif brain.core_or_elective == False:
1600                d['coe'] = 'Elective'
1601            code = d['id'] = brain.code
1602            d['code'] = code
1603            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1604            if res:
1605                course = res[0]
1606                d['title'] = course.title
1607                # The courses_catalog contains both strings and integers in its semester field.
1608                # Maybe this can be fixed by reindexing the catalog; the Course schema declares a 'CPS Int Field'.
1609                d['semester'] = str(course.semester)
1610            else:
1611                d['title'] = "Course has been removed from course list"
1612                d['semester'] = ''
1613            if brain.carry_over:
1614                d['coe'] = 'CO'
1615                carry_overs.append(d)
1616            else:
1617                if d['semester'] == '1':
1618                    normal1.append(d)
1619
1620                elif d['semester'] == '2':
1621                    normal2.append(d)
1622                else:
1623                    normal3.append(d)
1624        #normal.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
1625        #                                "%(semester)s%(code)s" % y))
1626        carry_overs.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
1627                                             "%(semester)s%(code)s" % y))
1628        return total_credits,gpa,carry_overs,normal1,normal2,normal3
1629    ###)
1630
1631
1632    def getAllCourses(self,student_id): ###(
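            # Like getCourses, but across all levels of the student and
            # without credit/GPA totals or semester grouping; returns one flat
            # list of course dicts.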
1633        query = Eq('student_id',student_id)
1634        course_results = self.course_results.evalAdvancedQuery(query)
1635        courses = []
1636        for brain in course_results:
1637            d = {}
1638
1639            for field in self.schema():
1640                d[field] = getattr(brain,field,'')
1641
1642            d['weight'] = ''
1643            d['grade'] = ''
1644            d['score'] = ''
1645
1646            if str(brain.credits).isdigit():
1647                credits = int(brain.credits)
1648                score = getattr(brain,'score',0)
1649                if score and str(score).isdigit() and int(score) > 0:
1650                    score = int(score)
1651                    grade,weight = self.getGradesFromScore(score)
1652                    d['weight'] = weight
1653                    d['grade'] = grade
1654                    d['score'] = score
1655            d['coe'] = ''
1656            if brain.core_or_elective:
1657                d['coe'] = 'Core'
1658            elif brain.core_or_elective == False:
1659                d['coe'] = 'Elective'
1660            code = d['id'] = brain.code
1661            d['code'] = code
1662            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1663            if res:
1664                course = res[0]
1665                d['title'] = course.title
1666                # The courses_catalog contains both strings and integers in its semester field.
1667                # Maybe this can be fixed by reindexing the catalog; the Course schema declares a 'CPS Int Field'.
1668                d['semester'] = str(course.semester)
1669            else:
1670                d['title'] = "Course has been removed from course list"
1671                d['semester'] = ''
1672            if brain.carry_over:
1673                d['coe'] = 'CO'
1674            courses.append(d)
1675        return courses
1676    ###)
1677
1678InitializeClass(CourseResults)
1679###)
1680
1681class OnlinePaymentsImport(WAeUPTable): ###(
1682
1683    meta_type = 'WAeUP Online Payment Transactions'
1684    name = "online_payments_import"
1685    key = "order_id"
1686    def __init__(self,name=None):
1687        if name ==  None:
1688            name = self.name
1689        WAeUPTable.__init__(self, name)
1690
1691
1692InitializeClass(OnlinePaymentsImport)
1693###)
1694
1695class ReturningImport(WAeUPTable): ###(
1696
1697    meta_type = 'Returning Import Table'
1698    name = "returning_import"
1699    key = "matric_no"
1700    def __init__(self,name=None):
1701        if name ==  None:
1702            name = self.name
1703        WAeUPTable.__init__(self, name)
1704
1705
1706InitializeClass(ReturningImport)
1707###)
1708
1709class ResultsImport(WAeUPTable): ###(
1710
1711    meta_type = 'Results Import Table'
1712    name = "results_import"
1713    key = "key"
1714    def __init__(self,name=None):
1715        if name ==  None:
1716            name = self.name
1717        WAeUPTable.__init__(self, name)
1718
1719
1720InitializeClass(ResultsImport)
1721
1722###)
1723
1724class PaymentsCatalog(WAeUPTable): ###(
1725    security = ClassSecurityInfo()
1726
1727    meta_type = 'WAeUP Payments Catalog'
1728    name = "payments_catalog"
1729    key = "order_id"
1730    def __init__(self,name=None):
1731        if name ==  None:
1732            name = self.name
1733        WAeUPTable.__init__(self, name)
1734
1735
1736    security.declarePrivate('notify_event_listener') ###(
1737    def notify_event_listener(self,event_type,object,infos):
1738        "listen for events"
1739        if not infos.has_key('rpath'):
1740            return
1741        pt = getattr(object,'portal_type',None)
1742        mt = getattr(object,'meta_type',None)
1743        data = {}
1744        if pt != 'Payment':
1745            return
1746        if event_type == 'sys_del_object' and mt == 'CPS Proxy Folder':
1747            self.deleteRecord(object.getContent().order_id)
1748        if mt == 'CPS Proxy Folder':
1749            return # is handled only for the real object
1750        if event_type != 'sys_modify_object':
1751            return
1752        for field in self.schema():
1753            data[field] = getattr(object,field,'')
1754        rpl = infos['rpath'].split('/')
1755        #import pdb;pdb.set_trace()
1756        student_id = rpl[-4]
1757        data['student_id'] = student_id
1758        modified = False
1759        try:
1760            self.modifyRecord(**data)
1761            modified = True
1762        except KeyError:
1763            #logger = logging.getLogger('WAeUPTables.PaymentsCatalog.%s' % self.__name__)
1764            #logger.info("could not modify entry for %(student_id)s with %(order_id)s" % data)
1765            pass
1766        if not modified:
1767            try:
1768                self.addRecord(**data)
1769            except:
1770                logger = logging.getLogger('WAeUPTables.PaymentsCatalog.notify_event_listener')
1771                logger.info("could not add or modify entry for %(student_id)s with %(order_id)s" % data)
1772        ###)
1773
1774
1775InitializeClass(PaymentsCatalog)
1776
1777###)
1778
1779# BBB:
1780AccomodationTable = AccommodationTable