source: WAeUP_SRP/trunk/WAeUPTables.py @ 5240

Last change on this file since 5240 was 5202, checked in by Henrik Bettermann, 14 years ago

add metadata firstname, middlename and lastname in students_catalog

  • Property svn:keywords set to Id
File size: 75.3 KB
RevLine 
[966]1#-*- mode: python; mode: fold -*-
[363]2# (C) Copyright 2005 AixtraWare <http://aixtraware.de>
3# Author: Joachim Schmitz <js@aixtraware.de>
4#
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License version 2 as published
7# by the Free Software Foundation.
8#
9# This program is distributed in the hope that it will be useful,
10# but WITHOUT ANY WARRANTY; without even the implied warranty of
11# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12# GNU General Public License for more details.
13#
14# You should have received a copy of the GNU General Public License
15# along with this program; if not, write to the Free Software
16# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
17# 02111-1307, USA.
18#
19# $Id: WAeUPTables.py 5202 2010-05-23 04:26:23Z henrik $
20
21from zope.interface import implements
22from Globals import InitializeClass
23from Products.ZCatalog.ZCatalog import ZCatalog
[1620]24from Products.ZCatalog.ProgressHandler import ZLogHandler
[780]25from AccessControl import ClassSecurityInfo
26from Products.CMFCore.permissions import ModifyPortalContent
[2094]27from Products.CMFCore.utils import getToolByName
28from Products.CMFCore.CatalogTool import CatalogTool
29from Products.CPSSchemas.StorageAdapter import MappingStorageAdapter
30from Products.CPSSchemas.DataStructure import DataStructure
31from Products.CPSSchemas.DataModel import DataModel
32from Products.AdvancedQuery import Eq, Between, Le,In
[1700]33import urllib
[1620]34import DateTime,time
[3989]35import csv,re,os
[780]36import logging
37import Globals
38p_home = Globals.package_home(globals())
39i_home = Globals.INSTANCE_HOME
40
[2084]41ADDING_SHEDULED = "adding_sheduled"
42OBJECT_CREATED = "object_created"
[2845]43NOT_OCCUPIED = 'not_occupied'
[2084]44
[363]45from interfaces import IWAeUPTable
46
47class AttributeHolder(object):
48    pass
49
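# Helper: wrap a plain dict in an object with one attribute per key, because
# ZCatalog.catalog_object() indexes attribute access, not dictionary lookup.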
50def dict2ob(dict):
51    ob = AttributeHolder()
52    for key, value in dict.items():
53        setattr(ob, key, value)
54    return ob
55
[1146]56class WAeUPTable(ZCatalog): ###(
[834]57
[363]58    implements(IWAeUPTable)
[780]59    security = ClassSecurityInfo()
[2094]60    meta_type = None
[2099]61
[2094]62    def __init__(self,name=None):
63        if name ==  None:
64            name = self.name
65        ZCatalog.__init__(self,name)
[2099]66
[2094]67    def refreshCatalog(self, clear=0, pghandler=None): ###(
[1620]68        """ don't refresh for a normal table """
69
70        if self.REQUEST and self.REQUEST.RESPONSE:
71            self.REQUEST.RESPONSE.redirect(
72              self.REQUEST['URL1'] +
73              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20refresh%20not%20implemented')
74
[2094]75###)
76
77    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None): ###(
[1620]78        """ clears the whole enchilada """
[1986]79
[1916]80        #if REQUEST and RESPONSE:
81        #    RESPONSE.redirect(
82        #      URL1 +
83        #      '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Clearing%20disabled')
[1620]84
[1916]85        self._catalog.clear()
[1620]86        if REQUEST and RESPONSE:
87            RESPONSE.redirect(
88              URL1 +
[1916]89              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20cleared')
[1620]90
[2094]91###)
92
[4244]93    def record2dict(self,fields,record,index): ###(
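        # Build an export dict for one catalog record: the boolean 'sex' is
        # rendered as 'F'/'M', and with index == 'translate' the 'lga' and 'aos'
        # codes are replaced by their portal_vocabularies labels; empty values
        # become ''.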
[2189]94        d = {}
95        for key in fields:
96            v = getattr(record, key, None)
[3018]97            v_dump = v
[2192]98            if key == 'sex':
[3017]99                if v == True:
100                    v_dump = 'F'
[3018]101                elif v == False:
[3017]102                    v_dump = 'M'
103                d[key] = v_dump
[2192]104            elif v:
[4244]105                if index == 'translate':
106                    if key == 'lga':
107                        v_dump = self.portal_vocabularies.local_gov_areas.get(v)
108                        if not v_dump:
109                            v_dump = v
110                    elif key == 'aos':
111                        v_dump = self.portal_vocabularies.aos.get(v)
[3017]112                d[key] = v_dump
[2189]113            else:
114                d[key] = ''
115        return d
[2191]116
[2632]117###)
118
[2094]119    def addRecord(self, **data): ###(
[502]120        # The uid is the value of the table's key column (e.g. "bed" for the accommodation table).
121        uid = data[self.key]
122        res = self.searchResults({"%s" % self.key : uid})
123        if len(res) > 0:
124            raise ValueError("More than one record with uid %s" % uid)
125        self.catalog_object(dict2ob(data), uid=uid)
126        return uid
[834]127
[2094]128###)
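    # Usage sketch (illustrative values, not taken from this module): a concrete
    # table such as portal_pins could be filled with e.g.
    #   self.portal_pins.addRecord(pin='APP-1-0000000001', student='')
    # addRecord raises ValueError if a record with the same key value already exists.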
129
[363]130    def deleteRecord(self, uid):
131        self.uncatalog_object(uid)
[834]132
[2738]133    def getRecordByKey(self,key):
134        if not key:
135            return None
136        res = self.evalAdvancedQuery(Eq(self.key,key))
137        if res:
138            return res[0]
139        return None
140
[502]141    def searchAndSetRecord(self, **data):
142        raise NotImplementedError
143
[2094]144    def modifyRecord(self, record=None, **data): ###(
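        # Look up the existing record by its key, merge the passed keyword
        # values over the current schema/index values and re-catalog the result.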
[502]145        #records = self.searchResults(uid=uid)
146        uid = data[self.key]
[2069]147        if record is None:
148            records = self.searchResults({"%s" % self.key : uid})
149            if len(records) > 1:
150                # Can not happen, but anyway...
151                raise ValueError("More than one record with uid %s" % uid)
152            if len(records) == 0:
153                raise KeyError("No record for uid %s" % uid)
154            record = records[0]
[363]155        record_data = {}
156        for field in self.schema() + self.indexes():
157            record_data[field] = getattr(record, field)
158        # Add the updated data:
159        record_data.update(data)
160        self.catalog_object(dict2ob(record_data), uid)
161
[2094]162###)
163
164    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
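        # Re-catalog only the named index(es) for every record, taking the
        # values from the already stored metadata (update_metadata=0 below).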
[1062]165        if isinstance(name, str):
[2094]166            name =  (name,)
[1062]167        paths = self._catalog.uids.items()
168        i = 0
169        #import pdb;pdb.set_trace()
170        for p,rid in paths:
171            i += 1
172            metadata = self.getMetadataForRID(rid)
173            record_data = {}
174            for field in name:
175                record_data[field] = metadata.get(field)
176            uid = metadata.get(self.key)
177            self.catalog_object(dict2ob(record_data), uid, idxs=name,
178                                update_metadata=0)
[1082]179
[2094]180###)
181
182    security.declareProtected(ModifyPortalContent,"exportAllRecords") ###(
[780]183    def exportAllRecords(self):
184        "export a WAeUPTable"
185        #import pdb;pdb.set_trace()
186        fields = [field for field in self.schema()]
187        format = ','.join(['"%%(%s)s"' % fn for fn in fields])
188        csv = []
189        csv.append(','.join(['"%s"' % fn for fn in fields]))
190        for uid in self._catalog.uids:
191            records = self.searchResults({"%s" % self.key : uid})
192            if len(records) > 1:
193                # Can not happen, but anyway...
194                raise ValueError("More than one record with uid %s" % uid)
195            if len(records) == 0:
196                raise KeyError("No record for uid %s" % uid)
197            rec = records[0]
198            csv.append(format % rec)
199        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
200        open("%s/import/%s-%s.csv" % (i_home,self.getId(),current),"w+").write('\n'.join(csv))
[2094]201
202###)
203
[2189]204    security.declareProtected(ModifyPortalContent,"dumpAll")###(
[3757]205    def dumpAll(self,index=None,value=None):
[2189]206        """dump all data in the table to a csv"""
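        # Writes <INSTANCE_HOME>/export/<table>_<timestamp>.csv in chunks of
        # 2000 rows and logs throughput and an estimated completion time.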
207        member = self.portal_membership.getAuthenticatedMember()
[2974]208        logger = logging.getLogger('WAeUPTables.WAeUPTable.dumpAll')
[2189]209        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
210        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
211        res_list = []
212        lines = []
213        if hasattr(self,"export_keys"):
214            fields = self.export_keys
215        else:
216            fields = []
217            for f in self.schema():
218                fields.append(f)
219        headline = ','.join(fields)
220        out = open(export_file,"wb")
221        out.write(headline +'\n')
222        out.close()
223        out = open(export_file,"a")
224        csv_writer = csv.DictWriter(out,fields,)
[3757]225        if index is not None and value is not None:
226            records = self.evalAdvancedQuery(Eq(index,value))
227        else:
228            records = self()
[2189]229        nr2export = len(records)
230        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
231        chunk = 2000
232        total = 0
233        start = DateTime.DateTime().timeTime()
234        start_chunk = DateTime.DateTime().timeTime()
235        for record in records:
236            not_all = False
[4244]237            d = self.record2dict(fields,record,index)
[2189]238            lines.append(d)
239            total += 1
240            if total and not total % chunk or total == len(records):
241                csv_writer.writerows(lines)
242                anz = len(lines)
243                logger.info("wrote %(anz)d  total written %(total)d" % vars())
244                end_chunk = DateTime.DateTime().timeTime()
245                duration = end_chunk-start_chunk
246                per_record = duration/anz
247                till_now = end_chunk - start
248                average_per_record = till_now/total
249                estimated_end = DateTime.DateTime(start + average_per_record * nr2export)
250                estimated_end = estimated_end.strftime("%H:%M:%S")
251                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
252                start_chunk = DateTime.DateTime().timeTime()
253                lines = []
254        end = DateTime.DateTime().timeTime()
255        logger.info('total time %6.2f m' % ((end-start)/60))
256        import os
257        filename, extension = os.path.splitext(export_file)
258        from subprocess import call
259        msg = "wrote %(total)d records to %(export_file)s" % vars()
[2561]260        #try:
261        #    retcode = call('gzip %s' % (export_file),shell=True)
262        #    if retcode == 0:
263        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
264        #except OSError, e:
265        #    retcode = -99
266        #    logger.info("zip failed with %s" % e)
[2189]267        logger.info(msg)
268        args = {'portal_status_message': msg}
269        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
270        url = self.REQUEST.get('URL2')
[4537]271        return 'ready'
272        #return self.REQUEST.RESPONSE.redirect(url)
[2189]273    ###)
274
[2185]275    security.declarePrivate("_import_old") ###(
276    def _import_old(self,filename,schema,layout, mode,logger):
[2094]277        "import data from csv"
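        # Validates every row of <INSTANCE_HOME>/import/<filename>.csv against
        # the given schema and layout and returns a dict with counts, the valid
        # and invalid DataModel records, and the names of the *_imported /
        # *_not_imported csv files written alongside the input file.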
278        import transaction
279        import random
280        pm = self.portal_membership
281        member = pm.getAuthenticatedMember()
282        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
283        import_fn = "%s/import/%s.csv" % (i_home,filename)
284        imported_fn = "%s/import/%s_imported%s.csv" % (i_home,filename,current)
285        not_imported_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
286        start = True
287        tr_count = 1
288        total_imported = 0
289        total_not_imported = 0
290        total = 0
291        iname =  "%s" % filename
[2112]292        not_imported = []
293        imported = []
294        valid_records = []
295        invalid_records = []
296        d = {}
297        d['mode'] = mode
298        d['imported'] = total_imported
299        d['not_imported'] = total_not_imported
300        d['valid_records'] = valid_records
301        d['invalid_records'] = invalid_records
302        d['import_fn'] = import_fn
303        d['imported_fn'] = imported_fn
304        d['not_imported_fn'] = not_imported_fn
[2094]305        if schema is None:
306            em = 'No schema specified'
307            logger.error(em)
[2112]308            return d
[2094]309        if layout is None:
310            em = 'No layout specified'
311            logger.error(em)
[2112]312            return d
[2094]313        validators = {}
314        for widget in layout.keys():
[2112]315            try:
316                validators[widget] = layout[widget].validate
317            except AttributeError:
318                logger.info('%s has no validate attribute' % widget)
319                return d
[2094]320        # if mode == 'edit':
321        #     importer = self.importEdit
322        # elif mode == 'add':
323        #     importer = self.importAdd
324        # else:
325        #     importer = None
326        try:
[2185]327            items = csv.DictReader(open(import_fn,"rb"),
328                                   dialect="excel",
329                                   skipinitialspace=True)
[2094]330        except:
331            em = 'Error reading %s.csv' % filename
332            logger.error(em)
333            return d
[2185]334        #import pdb;pdb.set_trace()
[2094]335        for item in items:
336            if start:
337                start = False
338                logger.info('%s starts import from %s.csv' % (member,filename))
339                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
[2185]340                attrs = csv.reader(open("%s/import/%s.csv" % (i_home,filename),"rb"),
341                                   dialect="excel",
342                                   skipinitialspace=True).next()
[2094]343                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
344                diff2schema = set(import_keys).difference(set(schema.keys()))
345                diff2layout = set(import_keys).difference(set(layout.keys()))
346                if diff2layout:
347                    em = "not ignorable key(s) %s found in heading" % diff2layout
348                    logger.info(em)
349                    return d
350                s = ','.join(['"%s"' % fn for fn in import_keys])
351                open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
352                #s = '"id",' + s
353                open(imported_fn,"a").write(s + '\n')
354                format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
355                format_error = format + ',"%(Error)s"'
356                #format = '"%(id)s",'+ format
357                adapters = [MappingStorageAdapter(schema, item)]
358            dm = DataModel(item, adapters,context=self)
359            ds = DataStructure(data=item,datamodel=dm)
360            error_string = ""
[2503]361            #import pdb;pdb.set_trace()
[2094]362            for k in import_keys:
363                if not validators[k](ds,mode=mode):
364                    error_string += " %s : %s" % (k,ds.getError(k))
365            # if not error_string and importer:
366            #     item.update(dm)
367            #     item['id'],error = importer(item)
368            #     if error:
369            #         error_string += error
370            if error_string:
371                item['Error'] = error_string
372                invalid_records.append(dm)
373                not_imported.append(format_error % item)
374                total_not_imported += 1
375            else:
376                em = format % item
377                valid_records.append(dm)
378                imported.append(em)
379                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
380                tr_count += 1
381                total_imported += 1
382            total += 1
383        if len(imported) > 0:
384            open(imported_fn,"a").write('\n'.join(imported))
385        if len(not_imported) > 0:
386            open(not_imported_fn,"a").write('\n'.join(not_imported))
387        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
388        d['imported'] = total_imported
389        d['not_imported'] = total_not_imported
390        d['valid_records'] = valid_records
391        d['invalid_records'] = invalid_records
392        d['imported_fn'] = imported_fn
393        d['not_imported_fn'] = not_imported_fn
394        #logger.info(em)
395        return d
[1935]396    ###)
[2185]397
398    security.declarePrivate("_import") ###(
399    def _import_new(self,csv_items,schema, layout, mode,logger):
400        "import data from csv.Dictreader Instance"
401        start = True
402        tr_count = 1
403        total_imported = 0
404        total_not_imported = 0
405        total = 0
406        #iname =  "%s" % filename # 'filename' is not passed to this method
407        not_imported = []
408        valid_records = []
409        invalid_records = []
410        duplicate_records = []
411        d = {}
412        d['mode'] = mode
413        d['valid_records'] = valid_records
414        d['invalid_records'] = invalid_records
415        d['duplicate_records'] = duplicate_records
416        # d['import_fn'] = import_fn
417        # d['imported_fn'] = imported_fn
418        # d['not_imported_fn'] = not_imported_fn
419        validators = {}
420        for widget in layout.keys():
421            try:
422                validators[widget] = layout[widget].validate
423            except AttributeError:
424                logger.info('%s has no validate attribute' % widget)
425                return d
426        for item in csv_items:
427            if start:
428                start = False
429                logger.info('%s starts import' % self.portal_membership.getAuthenticatedMember())
430                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
431                attrs = csv_items.fieldnames
432                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
433                diff2schema = set(import_keys).difference(set(schema.keys()))
434                diff2layout = set(import_keys).difference(set(layout.keys()))
435                if diff2layout:
436                    em = "not ignorable key(s) %s found in heading" % diff2layout
437                    logger.info(em)
438                    return d
439                # s = ','.join(['"%s"' % fn for fn in import_keys])
440                # open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
441                # #s = '"id",' + s
442                # open(imported_fn,"a").write(s + '\n')
443                # format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
444                # format_error = format + ',"%(Error)s"'
445                # #format = '"%(id)s",'+ format
446                adapters = [MappingStorageAdapter(schema, item)]
447            dm = DataModel(item, adapters,context=self)
448            ds = DataStructure(data=item,datamodel=dm)
449            error_string = ""
450            for k in import_keys:
451                if not validators[k](ds,mode=mode):
452                    error_string += " %s : %s" % (k,ds.getError(k))
453            if error_string:
454                item['Error'] = error_string
455                #invalid_records.append(dm)
456                invalid_records.append(item)
457                total_not_imported += 1
458            else:
459                #em = format % item # 'format' is not defined in this method
460                valid_records.append(dm)
461                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
462                tr_count += 1
463                total_imported += 1
464            total += 1
465        # if len(imported) > 0:
466        #     open(imported_fn,"a").write('\n'.join(imported))
467        # if len(not_imported) > 0:
468        #     open(not_imported_fn,"a").write('\n'.join(not_imported))
469        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
470        d['imported'] = total_imported
471        d['not_imported'] = total_not_imported
472        d['valid_records'] = valid_records
473        d['invalid_records'] = invalid_records
474        return d
475    ###)
476
[2396]477    security.declarePublic("missingValue")###(
478    def missingValue(self):
479        from Missing import MV
480        return MV
481    ###)
[2094]482###)
[834]483
[1146]484class AccommodationTable(WAeUPTable): ###(
[834]485
[404]486    meta_type = 'WAeUP Accommodation Tool'
[2094]487    name = "portal_accommodation"
[502]488    key = "bed"
[3043]489    not_occupied = NOT_OCCUPIED
[2094]490    def __init__(self,name=None):
491        if name ==  None:
492            name = self.name
493        WAeUPTable.__init__(self, name)
[2866]494
[3772]495    def searchAndReserveBed(self, student_id,bed_type,random_order=False): ###(
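        # Return codes: (1, bed) bed newly reserved, (-1, bed) student already
        # has a bed, (-2, "no bed") nothing free for this bed_type,
        # (-3, 'more than one bed') inconsistent data.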
[3406]496        logger = logging.getLogger('WAeUPTables.AccommodationTable.searchAndReserveBed')
[2845]497        records = self.evalAdvancedQuery(Eq('student',student_id))
[3406]498        if len(records) == 1:
499            #return -1,"Student with Id %s already booked bed %s." % (student_id,records[0].bed)
500            logger.info('%s found (reserved) bed %s' % (student_id,records[0].bed))
501            return -1,records[0].bed
502        elif len(records) > 1:
503            logger.info('%s found more than one (reserved) bed' % (student_id))
[3408]504            return -3,'more than one bed'
[2845]505        query = Eq('bed_type',bed_type) & Eq('student',NOT_OCCUPIED)
506        records = self.evalAdvancedQuery(query,sortSpecs=('sort_id','bed'))
[635]507        if len(records) == 0:
[3408]508            logger.info('no bed %s available for %s' % (bed_type,student_id))
[3406]509            return -2,"no bed"
[3772]510        if random_order:
511            import random
512            bed_no = random.randint(0,len(records)-1)
513        else:
514            bed_no = 0
515        rec = records[bed_no]
[635]516        self.modifyRecord(bed=rec.bed,student=student_id)
[3406]517        logger.info('%s booked bed %s' % (student_id,rec.bed))
[635]518        return 1,rec.bed
[3043]519    ###)
[363]520
[834]521
[404]522InitializeClass(AccommodationTable)
[411]523
[1146]524###)
525
526class PinTable(WAeUPTable): ###(
[1030]527    from ZODB.POSException import ConflictError
[2973]528    security = ClassSecurityInfo()
[440]529    meta_type = 'WAeUP Pin Tool'
[2094]530    name = "portal_pins"
[502]531    key = 'pin'
[2859]532
[2094]533    def __init__(self,name=None):
534        if name ==  None:
535            name = self.name
536        WAeUPTable.__init__(self, name)
[1082]537
[2973]538    security.declareProtected(ModifyPortalContent,"dumpAll")###(
[4546]539    def dumpAll(self,include_unused=None,index=None):
[2973]540        """dump all data in the table to a csv"""
541        member = self.portal_membership.getAuthenticatedMember()
[2974]542        logger = logging.getLogger('WAeUPTables.PinTable.dumpAll')
[2973]543        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
544        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
545        res_list = []
546        lines = []
547        if hasattr(self,"export_keys"):
548            fields = self.export_keys
549        else:
550            fields = []
551            for f in self.schema():
552                fields.append(f)
553        headline = ','.join(fields)
554        out = open(export_file,"wb")
555        out.write(headline +'\n')
556        out.close()
557        out = open(export_file,"a")
558        csv_writer = csv.DictWriter(out,fields,)
559        if include_unused is not None and str(member) not in ('admin','joachim'):
560            logger.info('%s tries to dump pintable with unused pins' % (member))
561            return
562        if include_unused is not None:
563            records = self()
564        else:
565            records = self.evalAdvancedQuery(~Eq('student',''))
566        nr2export = len(records)
567        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
568        chunk = 2000
569        total = 0
570        start = DateTime.DateTime().timeTime()
571        start_chunk = DateTime.DateTime().timeTime()
572        for record in records:
573            not_all = False
[4546]574            d = self.record2dict(fields,record,index)
[2973]575            lines.append(d)
576            total += 1
577            if total and not total % chunk or total == len(records):
578                csv_writer.writerows(lines)
579                anz = len(lines)
580                logger.info("wrote %(anz)d  total written %(total)d" % vars())
581                end_chunk = DateTime.DateTime().timeTime()
582                duration = end_chunk-start_chunk
583                per_record = duration/anz
584                till_now = end_chunk - start
585                average_per_record = till_now/total
586                estimated_end = DateTime.DateTime(start + average_per_record * nr2export)
587                estimated_end = estimated_end.strftime("%H:%M:%S")
588                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
589                start_chunk = DateTime.DateTime().timeTime()
590                lines = []
591        end = DateTime.DateTime().timeTime()
592        logger.info('total time %6.2f m' % ((end-start)/60))
593        import os
594        filename, extension = os.path.splitext(export_file)
595        from subprocess import call
596        msg = "wrote %(total)d records to %(export_file)s" % vars()
597        #try:
598        #    retcode = call('gzip %s' % (export_file),shell=True)
599        #    if retcode == 0:
600        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
601        #except OSError, e:
602        #    retcode = -99
603        #    logger.info("zip failed with %s" % e)
604        logger.info(msg)
605        args = {'portal_status_message': msg}
606        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
607        url = self.REQUEST.get('URL2')
608        return self.REQUEST.RESPONSE.redirect(url)
609    ###)
[1082]610
[2973]611
612
[710]613    def searchAndSetRecord(self, uid, student_id,prefix):
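        # Return codes: (-1, None) pin not found, (1, record) pin now assigned
        # to student_id, (2, record) already assigned to the same student (or a
        # ConflictError occurred while writing), (0, record) assigned to a
        # different student, (-3, record) otherwise.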
[2191]614
615        # The following line must be activated after resetting the
616        # the portal_pins table. This is to avoid duplicate entries
[2184]617        # and disable duplicate payments.
[2191]618
[2184]619        #student_id = student_id.upper()
620
[2716]621        #records = self.searchResults(student = student_id)
[2579]622        #if len(records) > 0 and prefix in ('CLR','APP'):
623        #    for r in records:
624        #        if r.pin != uid and r.prefix_batch.startswith(prefix):
625        #            return -2
[502]626        records = self.searchResults({"%s" % self.key : uid})
627        if len(records) > 1:
628            # Can not happen, but anyway...
629            raise ValueError("More than one record with uid %s" % uid)
630        if len(records) == 0:
[2766]631            return -1,None
[502]632        record = records[0]
633        if record.student == "":
634            record_data = {}
635            for field in self.schema() + self.indexes():
636                record_data[field] = getattr(record, field)
637            # Add the updated data:
[635]638            record_data['student'] = student_id
[1030]639            try:
640                self.catalog_object(dict2ob(record_data), uid)
[2766]641                return 1,record
[1030]642            except self.ConflictError: # ConflictError is bound on the class, not at module level
[2766]643                return 2,record
[990]644        if record.student.upper() != student_id.upper():
[2766]645            return 0,record
[997]646        if record.student.upper() == student_id.upper():
[2766]647            return 2,record
648        return -3,record
[440]649InitializeClass(PinTable)
[1146]650###)
[966]651
[1146]652class PumeResultsTable(WAeUPTable): ###(
653
[966]654    meta_type = 'WAeUP PumeResults Tool'
[2094]655    name = "portal_pumeresults"
[966]656    key = "jamb_reg_no"
[2094]657    def __init__(self,name=None):
658        if name ==  None:
659            name = self.name
660        WAeUPTable.__init__(self, name)
[966]661
662
663InitializeClass(PumeResultsTable)
664
[1146]665###)
[971]666
[2094]667class ApplicantsCatalog(WAeUPTable): ###(
668
[2113]669    meta_type = 'WAeUP Applicants Catalog'
[2094]670    name = "applicants_catalog"
671    key = "reg_no"
672    security = ClassSecurityInfo()
[2537]673    #export_keys = (
674    #               "reg_no",
675    #               "status",
676    #               "lastname",
677    #               "sex",
678    #               "date_of_birth",
679    #               "lga",
680    #               "email",
681    #               "phone",
682    #               "passport",
683    #               "entry_mode",
684    #               "pin",
685    #               "screening_type",
686    #               "registration_date",
687    #               "testdate",
688    #               "application_date",
689    #               "screening_date",
690    #               "faculty",
691    #               "department",
692    #               "course1",
693    #               "course2",
694    #               "course3",
695    #               "eng_score",
696    #               "subj1",
697    #               "subj1score",
698    #               "subj2",
699    #               "subj2score",
700    #               "subj3",
701    #               "subj3score",
702    #               "aggregate",
703    #               "course_admitted",
704    #               )
[2632]705
[2094]706    def __init__(self,name=None):
707        if name ==  None:
708            name = self.name
709        WAeUPTable.__init__(self, name)
710
[2185]711    security.declareProtected(ModifyPortalContent,"new_importCSV")###(
712    def new_importCSV(self,filename="JAMB_data",
713                  schema_id="application",
[2503]714                  layout_id="import_application",
[2185]715                  mode='add'):
716        """ import JAMB data """
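        # Sketch of the intended flow: create a lock file to prevent concurrent
        # imports, look up schema and layout, validate the rows via _import_new
        # and write imported/duplicate/invalid rows to separate csv files.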
717        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
718        pm = self.portal_membership
719        member = pm.getAuthenticatedMember()
720        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
721        lock_fn = "%s/import/%s_import_lock" % (i_home,filename)
722        import_fn = "%s/import/%s.csv" % (i_home,filename)
723        if mode not in ('add','edit'):
724            logger.info("invalid mode: %s" % mode)
725        if os.path.exists(lock_fn):
726            logger.info("import of %(import_fn)s already in progress" % vars())
727            return
728        lock_file = open(lock_fn,"w")
729        lock_file.write("%(current)s \n" % vars())
730        lock_file.close()
731        invalid_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
732        duplicate_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
733        stool = getToolByName(self, 'portal_schemas')
734        ltool = getToolByName(self, 'portal_layouts')
735        schema = stool._getOb(schema_id)
736        if schema is None:
737            em = 'No such schema %s' % schema_id
738            logger.error(em)
739            return
740        for postfix in ('_import',''):
741            layout_name = "%(layout_id)s%(postfix)s" % vars()
742            if hasattr(ltool,layout_name):
743                break
744        layout = ltool._getOb(layout_name)
745        if layout is None:
746            em = 'No such layout %s' % layout_id
747            logger.error(em)
748            return
749        try:
750            csv_file = csv.DictReader(open(import_fn,"rb"))
751        except:
752            em = 'Error reading %s.csv' % filename
753            logger.error(em)
[2191]754            return
[2185]755        d = self._import_new(csv_file,schema,layout,mode,logger)
756        imported = []
757        edited = []
758        duplicates = []
759        not_found = []
760        if len(d['valid_records']) > 0:
761            for record in d['valid_records']:
762                #import pdb;pdb.set_trace()
763                if mode == "add":
764                    try:
765                        self.addRecord(**dict(record.items()))
766                        imported.append(dict(record.items()))
767                        logger.info("added %s" % record.items())
768                    except ValueError:
769                        duplicates.append(dict(record.items()))
770                        logger.info("duplicate %s" % record.items())
771                elif mode == "edit":
772                    try:
773                        self.modifyRecord(**dict(record.items()))
774                        edited.append(dict(record.items()))
775                        logger.info("edited %s" % record.items())
776                    except KeyError:
777                        not_found.append(dict(record.items()))
778                        logger.info("not found %s" % record.items())
779        invalid = d['invalid_records']
780        for itype in ("imported","edited","not_found","duplicates","invalid"):
781            outlist = locals()[itype]
782            if len(outlist):
783                d = {}
784                for k in outlist[0].keys():
785                    d[k] = k
[2191]786                outlist[0] = d
[2185]787                outfile = open("file_name_%s" % itype,'w')
788                csv.DictWriter(outfile,outlist[0].keys()).writerows(outlist)
789                logger.info("wrote %s records to file_name_%s" % (itype,itype))
790###)
791
[2094]792    security.declareProtected(ModifyPortalContent,"importCSV")###(
793    def importCSV(self,filename="JAMB_data",
794                  schema_id="application",
[2508]795                  layout_id="application_pce",
[2094]796                  mode='add'):
797        """ import JAMB data """
[2099]798        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
799        stool = getToolByName(self, 'portal_schemas')
800        ltool = getToolByName(self, 'portal_layouts')
801        schema = stool._getOb(schema_id)
802        if schema is None:
803            em = 'No such schema %s' % schema_id
804            logger.error(em)
805            return
806        layout = ltool._getOb(layout_id)
807        if layout is None:
808            em = 'No such layout %s' % layout_id
809            logger.error(em)
810            return
[2185]811        d = self._import_old(filename,schema,layout,mode,logger)
[2094]812        if len(d['valid_records']) > 0:
813            for record in d['valid_records']:
814                #import pdb;pdb.set_trace()
815                if mode == "add":
816                    self.addRecord(**dict(record.items()))
817                    logger.info("added %s" % record.items())
818                elif mode == "edit":
819                    self.modifyRecord(**dict(record.items()))
820                    logger.info("edited %s" % record.items())
821                else:
822                    logger.info("invalid mode: %s" % mode)
823        logger.info("%(mode)sed %(imported)d records, invalid written to %(not_imported_fn)s" % d)
[2632]824    ###)
[2094]825
826InitializeClass(ApplicantsCatalog)
827
828###)
829
[1146]830class StudentsCatalog(WAeUPTable): ###(
[1620]831    security = ClassSecurityInfo()
[1146]832
[971]833    meta_type = 'WAeUP Students Catalog'
834    name = "students_catalog"
835    key = "id"
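    # Maps the portal_type of each student sub-document to the id of the
    # sub-object it lives in and the catalog columns that are fed from it.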
[1700]836    affected_types = {   ###(
[1749]837                      'StudentApplication':
[2069]838                      {'id': 'application',
839                       'fields':
840                       ('jamb_reg_no',
841                        'entry_mode',
842                        #'entry_level',
843                        'entry_session',
844                       )
845                      },
[1700]846                      'StudentClearance':
[2069]847                      {'id': 'clearance',
848                       'fields':
849                       ('matric_no',
850                        'lga',
[5200]851                        'date_of_birth',  # birthday
[2069]852                       )
853                      },
854                      'StudentPersonal':
855                      {'id': 'personal',
856                       'fields':
857                       ('name',
858                        'sex',
859                        'perm_address',
860                        'email',
861                        'phone',
[5200]862                        'marit_stat',
[5202]863                        'firstname',
864                        'middlename',
865                        'lastname',
[2069]866                       )
867                      },
868                      'StudentStudyCourse':
869                      {'id': 'study_course',
870                       'fields':
871                       ('course', # study_course
872                        'faculty', # from certificate
873                        'department', # from certificate
874                        'end_level', # from certificate
875                        'level', # current_level
[3780]876                        'mode',  # from certificate
[2069]877                        'session', # current_session
878                        'verdict', # current_verdict
879                       )
880                      },
881                     }
[1700]882    ###)
[1625]883
[2094]884    def __init__(self,name=None):
885        if name ==  None:
886            name = self.name
887        WAeUPTable.__init__(self, name)
[1620]888        return
[1625]889
[1700]890    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None):
891        """ clears the whole enchilada """
892        self._catalog.clear()
[971]893
[1700]894        if REQUEST and RESPONSE:
895            RESPONSE.redirect(
896              URL1 +
897              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Cleared')
[971]898
[1700]899    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
900        """ clear the catalog, then re-index everything """
901
902        elapse = time.time()
903        c_elapse = time.clock()
904
905        pgthreshold = self._getProgressThreshold()
906        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
907        self.refreshCatalog(clear=1, pghandler=handler)
908
909        elapse = time.time() - elapse
910        c_elapse = time.clock() - c_elapse
911
912        RESPONSE.redirect(
913            URL1 +
914            '/manage_catalogAdvanced?manage_tabs_message=' +
915            urllib.quote('Catalog Updated \n'
916                         'Total time: %s\n'
917                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
918    ###)
919
[2084]920    def fill_certificates_dict(self): ###(
[2078]921        "return certificate data in  dict"
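        # Builds {certificate_id: {faculty, department, end_level, study_mode}}
        # once; callers cache the result on the volatile attribute _v_certificates.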
922        certificates_brains = self.portal_catalog(portal_type ='Certificate')
923        d = {}
924        for cb in certificates_brains:
925            certificate_doc = cb.getObject().getContent()
926            cb_path = cb.getPath().split('/')
927            ld = {}
928            ld['faculty'] = cb_path[-4]
929            ld['department'] = cb_path[-3]
930            ld['end_level'] = getattr(certificate_doc,'end_level','999')
[3348]931            ld['study_mode'] = getattr(certificate_doc,'study_mode','')
[2078]932            d[cb.getId] = ld
933        return d
[2084]934    ###)
935
[2078]936    def get_from_doc_department(self,doc,cached_data={}): ###(
[1620]937        "return the students department"
[1700]938        if doc is None:
[1620]939            return None
[3354]940        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
[3348]941            return self._v_certificates[doc.study_course]['department']
[1700]942        certificate_res = self.portal_catalog(id = doc.study_course)
[1620]943        if len(certificate_res) != 1:
944            return None
945        return certificate_res[0].getPath().split('/')[-3]
946
[2078]947    def get_from_doc_faculty(self,doc,cached_data={}):
[1700]948        "return the students faculty"
949        if doc is None:
[1620]950            return None
[3354]951        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
[3348]952            return self._v_certificates[doc.study_course]['faculty']
[1700]953        certificate_res = self.portal_catalog(id = doc.study_course)
954        if len(certificate_res) != 1:
955            return None
956        return certificate_res[0].getPath().split('/')[-4]
[1620]957
[2099]958    def get_from_doc_end_level(self,doc,cached_data={}):
[2069]959        "return the students end_level"
960        if doc is None:
961            return None
[3354]962        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
[3348]963            return self._v_certificates[doc.study_course]['end_level']
[2069]964        certificate_res = self.portal_catalog(id = doc.study_course)
965        if len(certificate_res) != 1:
966            return None
967        return getattr(certificate_res[0].getObject().getContent(),'end_level','unknown')
968
[2078]969    def get_from_doc_level(self,doc,cached_data={}):
[1700]970        "return the students level"
971        if doc is None:
[1620]972            return None
[1700]973        return getattr(doc,'current_level',None)
[1620]974
[3780]975    #def get_from_doc_mode(self,doc,cached_data={}):
976    #    "return the students mode"
977    #    if doc is None:
978    #        return None
979    #    cm = getattr(doc,'current_mode',None)
980    #    return cm
981   
[2078]982    def get_from_doc_mode(self,doc,cached_data={}):
[1705]983        "return the students mode"
[1700]984        if doc is None:
[1620]985            return None
[3780]986        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
987            return self._v_certificates[doc.study_course]['study_mode']
988        certificate_res = self.portal_catalog(id = doc.study_course)
989        if len(certificate_res) != 1:
990            return None
991        return getattr(certificate_res[0].getObject().getContent(),'study_mode','unknown')   
[1625]992
[1749]993
[5200]994    def get_from_doc_marit_stat(self,doc,cached_data={}):
995        "return the students marit_stat"
996        if doc is None:
997            return None
998        ms = getattr(doc,'marit_stat',None)
999        if ms == True:
1000            return 'married'
1001        elif ms == False:
1002            return 'single'
1003        else:
1004            return 'undefined'
1005           
1006    def get_from_doc_date_of_birth(self,doc,cached_data={}):
1007        "return the students date of birth"
1008        if doc is None:
1009            return None
1010        return getattr(doc,'birthday',None)           
1011
[2078]1012    def get_from_doc_session(self,doc,cached_data={}):
[1705]1013        "return the students current_session"
1014        if doc is None:
1015            return None
1016        return getattr(doc,'current_session',None)
1017
[2078]1018    def get_from_doc_entry_session(self,doc,cached_data={}):
[1700]1019        "return the students entry_session"
1020        if doc is None:
[1620]1021            return None
[1705]1022        es = getattr(doc,'entry_session',None)
[3958]1023        if es is not None and len(es) < 3:
[1705]1024            return es
[3743]1025        elif es is not None and len(es) == 9:
1026            return es[2:4]   
[1700]1027        try:
1028            digit = int(doc.jamb_reg_no[0])
1029        except:
[1986]1030            return "-1"
[3958]1031        if digit < 9:
[1700]1032            return "0%c" % doc.jamb_reg_no[0]
1033        return "9%c" % doc.jamb_reg_no[0]
1034
[2078]1035    def get_from_doc_course(self,doc,cached_data={}):
[1620]1036        "return the students study_course"
[1700]1037        if doc is None:
[1620]1038            return None
[1700]1039        return getattr(doc,'study_course',None)
[1620]1040
[2078]1041    def get_from_doc_name(self,doc,cached_data={}):
[1620]1042        "return the students name from the personal"
[1700]1043        if doc is None:
[1620]1044            return None
1045        return "%s %s %s" % (doc.firstname,doc.middlename,doc.lastname)
1046
[2078]1047    def get_from_doc_verdict(self,doc,cached_data={}):
[1700]1048        "return the students current_verdict"
1049        if doc is None:
[1620]1050            return None
[1700]1051        return getattr(doc,'current_verdict',None)
[1702]1052    ###)
[1620]1053
[1702]1054    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
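        # Re-computes the given catalog column(s) for every Student by reading
        # the affected sub-document(s); falls back to the returning_import
        # record when a student has no sub-objects yet.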
[3781]1055        if not hasattr(self,'_v_certificates'):
1056            self._v_certificates = self.fill_certificates_dict()
[1702]1057        if isinstance(name, str):
1058            name = (name,)
[1749]1059        reindextypes = {}
[1702]1060        reindex_special = []
1061        for n in name:
[3781]1062            if n in ("review_state",):
[1702]1063                reindex_special.append(n)
1064            else:
1065                for pt in self.affected_types.keys():
[1707]1066                    if n in self.affected_types[pt]['fields']:
[1702]1067                        if reindextypes.has_key(pt):
1068                            reindextypes[pt].append(n)
1069                        else:
1070                            reindextypes[pt]= [n]
1071                        break
[3781]1072        #cached_data = {}
1073        #if set(name).intersection(set(('faculty','department','end_level','mode'))):
1074        #    cached_data = self.fill_certificates_dict()
[1702]1075        students = self.portal_catalog(portal_type="Student")
[1954]1076        if hasattr(self,'portal_catalog_real'):
1077            aq_portal = self.portal_catalog_real.evalAdvancedQuery
1078        else:
1079            aq_portal = self.portal_catalog.evalAdvancedQuery
[1702]1080        num_objects = len(students)
1081        if pghandler:
1082            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1083        noattr = set(('StudentClearance','StudentPersonal')) & set(reindextypes.keys())
[2084]1084        #import pdb;pdb.set_trace()
[1702]1085        for i in xrange(num_objects):
1086            if pghandler: pghandler.report(i)
1087            student_brain = students[i]
[1707]1088            student_object = student_brain.getObject()
[1702]1089            data = {}
1090            modified = False
1091            sid = data['id'] = student_brain.getId
1092            if reindex_special and 'review_state' in reindex_special:
1093                modified = True
[3665]1094                data['review_state'] = student_object.portal_workflow.getInfoFor(student_object,'review_state',None)
[1707]1095            sub_objects = False
1096            for pt in reindextypes.keys():
[1702]1097                modified = True
[1707]1098                try:
1099                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1100                    sub_objects = True
1101                except:
1102                    continue
[2084]1103                for field in set(name).intersection(self.affected_types[pt]['fields']):
[1707]1104                    if hasattr(self,'get_from_doc_%s' % field):
[3781]1105                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc)
[1707]1106                    else:
1107                        data[field] = getattr(doc,field)
1108            if not sub_objects and noattr:
1109                import_res = self.returning_import(id = sid)
1110                if not import_res:
1111                    continue
1112                import_record = import_res[0]
1113                data['matric_no'] = import_record.matric_no
1114                data['sex'] = import_record.Sex == 'F'
1115                data['name'] = "%s %s %s" % (import_record.Firstname,
1116                                             import_record.Middlename,
1117                                             import_record.Lastname)
[1815]1118                data['jamb_reg_no'] = import_record.Entryregno
[1702]1119            if modified:
1120                self.modifyRecord(**data)
1121        if pghandler: pghandler.finish()
1122    ###)
[1620]1123
1124    def refreshCatalog(self, clear=0, pghandler=None): ###(
1125        """ re-index everything we can find """
1126        students_folder = self.portal_url.getPortalObject().campus.students
1127        if clear:
[1724]1128            self._catalog.clear()
[1700]1129        students = self.portal_catalog(portal_type="Student")
1130        num_objects = len(students)
[3781]1131        #cached_data = self.fill_certificates_dict()
1132        if not hasattr(self,'_v_certificates'):
1133            self._v_certificates = self.fill_certificates_dict()
[1620]1134        if pghandler:
1135            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1136        for i in xrange(num_objects):
1137            if pghandler: pghandler.report(i)
[1700]1138            student_brain = students[i]
1139            spath = student_brain.getPath()
[1727]1140            student_object = student_brain.getObject()
[1620]1141            data = {}
[1700]1142            sid = data['id'] = student_brain.getId
[3665]1143            #data['review_state'] = student_brain.review_state
1144            data['review_state'] = student_object.portal_workflow.getInfoFor(student_object,'review_state',None)
[1707]1145            sub_objects = False
1146            for pt in self.affected_types.keys():
1147                modified = True
1148                try:
1149                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1150                    sub_objects = True
1151                except:
[1727]1152                    #from pdb import set_trace;set_trace()
[1707]1153                    continue
1154                for field in self.affected_types[pt]['fields']:
1155                    if hasattr(self,'get_from_doc_%s' % field):
[2078]1156                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc)
[1707]1158                    else:
[1727]1159                        data[field] = getattr(doc,field,None)
1160            if not sub_objects:
[1700]1161                import_res = self.returning_import(id = sid)
1162                if not import_res:
[1620]1163                    continue
[1700]1164                import_record = import_res[0]
1165                data['matric_no'] = import_record.matric_no
1166                data['sex'] = import_record.Sex == 'F'
1167                data['name'] = "%s %s %s" % (import_record.Firstname,
1168                                             import_record.Middlename,
1169                                             import_record.Lastname)
[1815]1170                data['jamb_reg_no'] = import_record.Entryregno
[1700]1171            self.addRecord(**data)
[1620]1172        if pghandler: pghandler.finish()
1173    ###)
1174
[1700]1175    security.declarePrivate('notify_event_listener') ###(
[1620]1176    def notify_event_listener(self,event_type,object,infos):
1177        "listen for events"
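        # Keeps students_catalog in sync with CPS events: workflow transitions
        # update review_state, sys_add_object/sys_del_object add or remove the
        # student record, and sys_modify_object re-reads the fields listed in
        # affected_types for the modified sub-document.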
[1716]1178        if not infos.has_key('rpath'):
1179            return
[1702]1180        pt = getattr(object,'portal_type',None)
1181        mt = getattr(object,'meta_type',None)
[1954]1182        students_catalog = self
[1702]1183        data = {}
1184        if pt == 'Student' and\
1185           mt == 'CPS Proxy Folder' and\
1186           event_type.startswith('workflow'):
1187            data['id'] = object.getId()
1188            data['review_state'] = self.portal_workflow.getInfoFor(object,'review_state',None)
1189            students_catalog.modifyRecord(**data)
1190            return
[1700]1191        rpl = infos['rpath'].split('/')
[2396]1192        if pt == 'Student' and mt == 'CPS Proxy Folder':
[1700]1193            student_id = object.id
[2396]1194            if event_type == "sys_add_object":
1195                try:
1196                    self.addRecord(id = student_id)
1197                except ValueError:
1198                    pass
1199                return
1200            elif event_type == 'sys_del_object':
1201                self.deleteRecord(student_id)
[1716]1202        if pt not in self.affected_types.keys():
[1700]1203            return
[1716]1204        if event_type not in ('sys_modify_object',):
1205            return
[1700]1206        if mt == 'CPS Proxy Folder':
1207            return
[3354]1208        if not hasattr(self,'_v_certificates'):
1209            self._v_certificates = self.fill_certificates_dict()
[1716]1210        for field in self.affected_types[pt]['fields']:
[1700]1211            if hasattr(self,'get_from_doc_%s' % field):
1212                data[field] = getattr(self,'get_from_doc_%s' % field)(object)
1213            else:
1214                data[field] = getattr(object,field)
1215        data['id'] = rpl[2]
[1716]1216        self.modifyRecord(**data)
[1700]1217    ###)
[1620]1218
[1625]1219
[971]1220InitializeClass(StudentsCatalog)
1221
[1146]1222###)
1223
[3354]1224class CertificatesCatalog(WAeUPTable): ###(
1225    security = ClassSecurityInfo()
1226
1227    meta_type = 'WAeUP Certificates Catalog'
1228    name =  "certificates_catalog"
1229    key = "code"
1230    def __init__(self,name=None):
1231        if name ==  None:
1232            name =  self.name
1233        WAeUPTable.__init__(self, name)
1234
1235    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
1236        """ clear the catalog, then re-index everything """
1237
1238        elapse = time.time()
1239        c_elapse = time.clock()
1240
1241        pgthreshold = self._getProgressThreshold()
1242        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
1243        self.refreshCatalog(clear=1, pghandler=handler)
1244
1245        elapse = time.time() - elapse
1246        c_elapse = time.clock() - c_elapse
1247
1248        RESPONSE.redirect(
1249            URL1 +
1250            '/manage_catalogAdvanced?manage_tabs_message=' +
1251            urllib.quote('Catalog Updated \n'
1252                         'Total time: %s\n'
1253                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
1254    ###)
1255
1256    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1257        if isinstance(name, str):
1258            name = (name,)
1259        certificates = self.portal_catalog(portal_type="Certificate")
[3497]1260        num_objects = len(certificates)
[3354]1261        if pghandler:
1262            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1263        for i in xrange(num_objects):
1264            if pghandler: pghandler.report(i)
1265            certificate_brain = certificates[i]
1266            certificate_object = certificate_brain.getObject()
1267            pl = certificate_brain.getPath().split('/')
1268            data = {}
1269            cid = data[self.key] = certificate_brain.getId
1270            data['faculty'] = pl[-4]
1271            data['department'] = pl[-3]
1272            doc = certificate_object.getContent()
1273            for field in name:
1274                if field not in (self.key,'faculty','department'):
1275                    data[field] = getattr(doc,field)
1276            self.modifyRecord(**data)
1277        if pghandler: pghandler.finish()
1278    ###)
1279
1280    def refreshCatalog(self, clear=0, pghandler=None): ###(
1281        """ re-index everything we can find """
1282        if clear:
1283            self._catalog.clear()
1284        certificates = self.portal_catalog(portal_type="Certificate")
1285        num_objects = len(certificates)
1286        if pghandler:
1287            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1288        #from pdb import set_trace;set_trace()
1289        for i in xrange(num_objects):
1290            if pghandler: pghandler.report(i)
1291            certificate_brain = certificates[i]
1292            certificate_doc = certificate_brain.getObject().getContent()
1293            pl = certificate_brain.getPath().split('/')
1294            data = {}
1295            for field in self.schema():
1296                data[field] = getattr(certificate_doc,field,None)
1297            data[self.key] = certificate_brain.getId
1298            ai = pl.index('academics')
1299            data['faculty'] = pl[ai +1]
1300            data['department'] = pl[ai +2]
1301            if clear:
1302                self.addRecord(**data)
1303            else:
1304                self.modifyRecord(**data)
1305        if pghandler: pghandler.finish()
1306    ###)
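    # Both reindexIndex and refreshCatalog derive faculty and department from the
    # certificate's physical path. Assumed layout (illustrative only, not enforced here):
    #   .../campus/academics/<faculty>/<department>/<certificates folder>/<certificate id>
    # so that pl.index('academics') + 1 is the faculty id and + 2 the department id.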
1307
1308    security.declarePrivate('notify_event_listener') ###(
1309    def notify_event_listener(self,event_type,object,infos):
1310        "listen for events"
1311        if not infos.has_key('rpath'):
1312            return
1313        pt = getattr(object,'portal_type',None)
1314        mt = getattr(object,'meta_type',None)
1315        if pt != 'Certificate':
1316            return
1317        data = {}
1318        rpl = infos['rpath'].split('/')
1319        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
1320            return
1321        certificate_id = object.getId()
1322        data[self.key] = certificate_id
1323        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
1324            try:
1325                self.addRecord(**data)
1326            except ValueError:
1327                return
1328            certificate_id = object.getId()
1329            doc = object.getContent()
1330            if doc is None:
1331                return
1332            for field in self.schema():
1333                data[field] = getattr(doc,field,None)
1334            data[self.key] = certificate_id
1335            ai = rpl.index('academics')
1336            data['faculty'] = rpl[ai +1]
1337            data['department'] = rpl[ai +2]
1338            self.modifyRecord(**data)
1339            return
1340        if event_type == "sys_del_object":
1341            self.deleteRecord(certificate_id)
1342            return
1343        if event_type == "sys_modify_object" and mt == 'Certificate':
1344            #from pdb import set_trace;set_trace()
1345            for field in self.schema():
1346                data[field] = getattr(object,field,None)
1347            certificate_id = object.aq_parent.getId()
1348            data[self.key] = certificate_id
1349            ai = rpl.index('academics')
1350            data['faculty'] = rpl[ai +1]
1351            data['department'] = rpl[ai +2]
1352            self.modifyRecord(**data)
1353    ###)
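    # Event handling sketch: sys_add_object on the proxy folder adds a minimal record
    # (just the code) and immediately completes it from the content document;
    # sys_del_object drops the record; sys_modify_object on the Certificate document
    # re-reads every schema field plus faculty/department from the rpath.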
1354
1355
1356InitializeClass(CertificatesCatalog)
1357###)
1358
[1146]1359class CoursesCatalog(WAeUPTable): ###(
[1716]1360    security = ClassSecurityInfo()
[1146]1361
1362    meta_type = 'WAeUP Courses Catalog'
[2094]1363    name =  "courses_catalog"
[1146]1364    key = "code"
[2094]1365    def __init__(self,name=None):
1366        if name ==  None:
1367            name =  self.name
1368        WAeUPTable.__init__(self, name)
[1146]1369
[1716]1370    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
1371        """ clear the catalog, then re-index everything """
[1146]1372
[1716]1373        elapse = time.time()
1374        c_elapse = time.clock()
1375
1376        pgthreshold = self._getProgressThreshold()
1377        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
1378        self.refreshCatalog(clear=1, pghandler=handler)
1379
1380        elapse = time.time() - elapse
1381        c_elapse = time.clock() - c_elapse
1382
1383        RESPONSE.redirect(
1384            URL1 +
1385            '/manage_catalogAdvanced?manage_tabs_message=' +
1386            urllib.quote('Catalog Updated \n'
1387                         'Total time: %s\n'
1388                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
1389    ###)
1390
1391    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1392        if isinstance(name, str):
1393            name = (name,)
1394        courses = self.portal_catalog(portal_type="Course")
1395        num_objects = len(courses)
1396        if pghandler:
1397            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1398        for i in xrange(num_objects):
1399            if pghandler: pghandler.report(i)
1400            course_brain = courses[i]
1401            course_object = course_brain.getObject()
1402            pl = course_brain.getPath().split('/')
1403            data = {}
1404            cid = data[self.key] = course_brain.getId
1405            data['faculty'] = pl[-4]
1406            data['department'] = pl[-3]
1407            doc = course_object.getContent()
1408            for field in name:
1409                if field not in (self.key,'faculty','department'):
1410                    data[field] = getattr(doc,field)
1411            self.modifyRecord(**data)
1412        if pghandler: pghandler.finish()
1413    ###)
1414
1415    def refreshCatalog(self, clear=0, pghandler=None): ###(
1416        """ re-index everything we can find """
[1724]1417        if clear:
1418            self._catalog.clear()
[1716]1419        courses = self.portal_catalog(portal_type="Course")
1420        num_objects = len(courses)
1421        if pghandler:
1422            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
[1724]1423        #from pdb import set_trace;set_trace()
[1716]1424        for i in xrange(num_objects):
1425            if pghandler: pghandler.report(i)
1426            course_brain = courses[i]
[1724]1427            course_doc = course_brain.getObject().getContent()
[1716]1428            pl = course_brain.getPath().split('/')
1429            data = {}
[1724]1430            for field in self.schema():
[1749]1431                data[field] = getattr(course_doc,field,None)
[1716]1432            data[self.key] = course_brain.getId
[1724]1433            ai = pl.index('academics')
1434            data['faculty'] = pl[ai +1]
1435            data['department'] = pl[ai +2]
1436            if clear:
1437                self.addRecord(**data)
1438            else:
1439                self.modifyRecord(**data)
[1716]1440        if pghandler: pghandler.finish()
1441    ###)
1442
1443    security.declarePrivate('notify_event_listener') ###(
1444    def notify_event_listener(self,event_type,object,infos):
1445        "listen for events"
1446        if not infos.has_key('rpath'):
1447            return
1448        pt = getattr(object,'portal_type',None)
1449        mt = getattr(object,'meta_type',None)
1450        if pt != 'Course':
1451            return
1452        data = {}
1453        rpl = infos['rpath'].split('/')
1454        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
1455            return
1456        course_id = object.getId()
1457        data[self.key] = course_id
[1724]1458        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
[1716]1459            try:
1460                self.addRecord(**data)
1461            except ValueError:
[1724]1462                return
1463            course_id = object.getId()
1464            doc = object.getContent()
1465            if doc is None:
1466                return
1467            for field in self.schema():
[1749]1468                data[field] = getattr(doc,field,None)
[1724]1469            data[self.key] = course_id
1470            ai = rpl.index('academics')
1471            data['faculty'] = rpl[ai +1]
1472            data['department'] = rpl[ai +2]
1473            self.modifyRecord(**data)
1474            return
[1716]1475        if event_type == "sys_del_object":
1476            self.deleteRecord(course_id)
[1724]1477            return
[1716]1478        if event_type == "sys_modify_object" and mt == 'Course':
[1724]1479            #from pdb import set_trace;set_trace()
[1716]1480            for field in self.schema():
[1749]1481                data[field] = getattr(object,field,None)
[1716]1482            course_id = object.aq_parent.getId()
1483            data[self.key] = course_id
[1724]1484            ai = rpl.index('academics')
1485            data['faculty'] = rpl[ai +1]
1486            data['department'] = rpl[ai +2]
[1716]1487            self.modifyRecord(**data)
1488    ###)
1489
1490
[1146]1491InitializeClass(CoursesCatalog)
[1151]1492###)
[1146]1493
[2084]1494class CourseResults(WAeUPTable): ###(
[2069]1495    security = ClassSecurityInfo()
1496
1497    meta_type = 'WAeUP Results Catalog'
1498    name = "course_results"
[2084]1499    key = "key" # composite key "<student_id>|<level_id>|<course_id>", see addMultipleRecords below
[2094]1500    def __init__(self,name=None):
1501        if name ==  None:
1502            name = self.name
1503        WAeUPTable.__init__(self, name)
[2084]1504        self._queue = []
[2099]1505
[2094]1506    def addMultipleRecords(self, records): ###(
1507        """add many records"""
[3362]1508        existing_uids = []
[2094]1509        for data in records:
[3362]1510            uid = "%(student_id)s|%(level_id)s|%(course_id)s" % data
[2094]1511            data['%s' % self.key] = uid
[3362]1512            query = Eq(self.key, uid)
1513            res = self.course_results.evalAdvancedQuery(query)
[2094]1514            if len(res) > 0:
[3362]1515                rec = res[0]
1516                equal = True
1517                for attr in ('student_id','level_id','course_id'):
1518                    if getattr(rec,attr,'') != data[attr]:
1519                        equal = False
1520                        break
1521                if equal:
1522                    existing_uids += uid,
1523                    continue
[2094]1524            self.catalog_object(dict2ob(data), uid=uid)
[3362]1525        return existing_uids
[2094]1526    ###)
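    # Hedged usage sketch for addMultipleRecords (names and values are made up; nothing
    # in this module calls it like this), e.g. from a skins script:
    #   records = [{'student_id': 'A123456', 'level_id': '100', 'course_id': 'MTH101'}]
    #   existing = context.course_results.addMultipleRecords(records)
    # Each record is catalogued under the composite uid 'A123456|100|MTH101'; uids already
    # present with the same student/level/course are skipped and returned in the list.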
1527
[2434]1528    def deleteResultsHere(self,level_id,student_id): ###(
1529        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1530        course_results = self.course_results.evalAdvancedQuery(query)
[3362]1531        #import pdb;pdb.set_trace()
[2434]1532        for result in course_results:
1533            self.deleteRecord(result.key)
[2084]1534    ###)
1535
[2434]1536    def moveResultsHere(self,level,student_id): ###(
1537        #import pdb;pdb.set_trace()
1538        level_id = level.getId()
1539        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1540        course_results = self.course_results.evalAdvancedQuery(query)
[2437]1541        existing_courses = [cr.code for cr in course_results]
[2434]1542        to_delete = []
1543        for code,obj in level.objectItems():
[2437]1544            to_delete.append(code)
[2434]1545            carry_over = False
1546            if code.endswith('_co'):
1547                carry_over = True
[2437]1548                code  = code[:-3]
[2434]1549            if code in existing_courses:
[2094]1550                continue
[2434]1551            course_result_doc = obj.getContent()
[2094]1552            data = {}
[2434]1553            course_id = code
[2094]1554            for field in self.schema():
1555                data[field] = getattr(course_result_doc,field,'')
1556            data['key'] = key = "%(student_id)s|%(level_id)s|%(course_id)s" % vars()
[2099]1557            data['student_id'] = student_id
1558            data['level_id'] = level_id
[2439]1559            session_id = self.getLevelSession(level.getContent(),student_id,level_id)
[2442]1560            data['session_id'] = session_id
[2434]1561            #data['queue_status'] = OBJECT_CREATED
[2099]1562            data['code'] = course_id
[2434]1563            data['carry_over'] = carry_over
[2094]1564            self.catalog_object(dict2ob(data), uid=key)
[2434]1565        level.manage_delObjects(to_delete)
1566    ###)
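    # Carry-over convention used by moveResultsHere: a level object id ending in '_co',
    # e.g. 'MTH101_co', is catalogued under the bare course code 'MTH101' with
    # carry_over = True; afterwards all copied objects are deleted from the level folder.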
1567
1568    def getCourses(self,student_id,level_id): ###(
[2094]1569        query = Eq('student_id',student_id) & Eq('level_id', level_id)
[2434]1570        course_results = self.course_results.evalAdvancedQuery(query)
1571        carry_overs = []
[2606]1572        normal1 = []
1573        normal2 = []
[2761]1574        normal3 = []
[2757]1575        total_credits = 0
1576        gpa = 0
[2434]1577        for brain in course_results:
1578            d = {}
[2781]1579
[2761]1580            for field in self.schema():
[3707]1581                d[field] = getattr(brain,field,None)
1582                if repr(d[field]) == 'Missing.Value':
1583                    d[field] = ''
[2761]1584            d['weight'] = ''
1585            d['grade'] = ''
1586            d['score'] = ''
1587
[2750]1588            if str(brain.credits).isdigit():
[2757]1589                credits = int(brain.credits)
1590                total_credits += credits
[2780]1591                score = getattr(brain,'score',0)
1592                if score and str(score).isdigit() and int(score) > 0:
1593                    score = int(score)
[3707]1594                    grade,weight = self.getGradesFromScore(score,'')
[2781]1595                    gpa += weight * credits
1596                    d['weight'] = weight
1597                    d['grade'] = grade
1598                    d['score'] = score
[3675]1599
[3707]1600            #if str(brain.ca1).isdigit() and str(brain.ca2).isdigit() and str(brain.exam).isdigit():
1601            #    d['score_calc'] = int(brain.ca1) + int(brain.ca2) + int(brain.exam)
1602            #else:
1603            #    d['score_calc'] = ''
1604            try:
1605                d['score_calc'] = float(brain.ca1) + float(brain.ca2) + float(brain.exam)
1606            except:
[3675]1607                d['score_calc'] = ''
1608
[3707]1609            if d['score_calc']:
1610                grade,weight = self.getGradesFromScore(d['score_calc'],level_id) # returns (grade,weight) like the other call sites
1611                d['grade'] = grade
1612
[2757]1613            d['coe'] = ''
[2434]1614            if brain.core_or_elective:
1615                d['coe'] = 'Core'
[2757]1616            elif brain.core_or_elective == False:
1617                d['coe'] = 'Elective'
[2434]1618            id = code = d['id'] = brain.code
1619            d['code'] = code
[2864]1620            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1621            if res:
1622                course = res[0]
1623                d['title'] = course.title
1624                # The courses_catalog contains strings and integers in its semester field.
1625                # Maybe this can be fixed by reindexing the catalog. The course schema declares the field as 'CPS Int Field'.
1626                d['semester'] = str(course.semester)
1627            else:
[2866]1628                d['title'] = "Course has been removed from the course list"
[2864]1629                d['semester'] = ''
[2448]1630            if brain.carry_over:
[2761]1631                d['coe'] = 'CO'
[2434]1632                carry_overs.append(d)
1633            else:
[2614]1634                if d['semester'] == '1':
[2606]1635                    normal1.append(d)
[2614]1636
1637                elif d['semester'] == '2':
[2606]1638                    normal2.append(d)
1639                else:
1640                    normal3.append(d)
1641        #normal.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
1642        #                                "%(semester)s%(code)s" % y))
[2503]1643        carry_overs.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
[2460]1644                                             "%(semester)s%(code)s" % y))
[2757]1645        return total_credits,gpa,carry_overs,normal1,normal2,normal3
[2094]1646    ###)
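    # Note on the returned gpa value: it is the running sum of weight * credits over all
    # scored courses and is not divided by total_credits here; presumably the caller
    # computes the final grade point average as gpa / total_credits (an assumption, the
    # division does not happen in this method).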
1647
[4693]1648   
1649    # for transcript only
[3602]1650    def getAllCourses(self,student_id): ###(
1651        query = Eq('student_id',student_id)
1652        course_results = self.course_results.evalAdvancedQuery(query)
[3603]1653        courses = []
[3602]1654        for brain in course_results:
1655            d = {}
1656
1657            for field in self.schema():
1658                d[field] = getattr(brain,field,'')
1659
1660            d['weight'] = ''
1661            d['grade'] = ''
1662            d['score'] = ''
1663
1664            if str(brain.credits).isdigit():
1665                credits = int(brain.credits)
1666                score = getattr(brain,'score',0)
1667                if score and str(score).isdigit() and int(score) > 0:
1668                    score = int(score)
[4693]1669                    grade,weight = self.getGradesFromScore(score,'')
[3602]1670                    d['weight'] = weight
1671                    d['grade'] = grade
1672                    d['score'] = score
1673            d['coe'] = ''
1674            if brain.core_or_elective:
1675                d['coe'] = 'Core'
1676            elif brain.core_or_elective == False:
1677                d['coe'] = 'Elective'
1678            id = code = d['id'] = brain.code
1679            d['code'] = code
1680            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1681            if res:
1682                course = res[0]
1683                d['title'] = course.title
1684                # The courses_catalog contains strings and integers in its semester field.
1685                # Maybe this can be fixed by reindexing the catalog. The course schema declares the field as 'CPS Int Field'.
1686                d['semester'] = str(course.semester)
1687            else:
1688                d['title'] = "Course has been removed from the course list"
1689                d['semester'] = ''
1690            if brain.carry_over:
1691                d['coe'] = 'CO'
1692            courses.append(d)
1693        return courses
1694    ###)
[3841]1695   
[4036]1696    def getYearGroupAverage(self,session_id,level_id): ###(
1697        query = Eq('session_id',session_id) & Eq('level_id',level_id)
1698        course_results = self.course_results.evalAdvancedQuery(query)
1699        yga1 = 0
[4302]1700        yg1 = []
[4036]1701        counter1 = 0
1702        yga2 = 0
[4302]1703        yg2 = []
[4036]1704        counter2 = 0
1705        yga3 = 0
[4302]1706        yg3 = []
[4036]1707        counter3 = 0       
1708        #import pdb;pdb.set_trace()
1709        for brain in course_results:
1710            try:
[4302]1711                om = float(brain.ca1) + float(brain.ca2) + float(brain.exam)
1712                if not om > 0:
[4036]1713                    continue
1714                code = brain.code               
1715                res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1716                if res:
1717                    course = res[0]
1718                    # The courses_catalog contains strings and integers in its semester field.
1719                    # Maybe this can be fixed by reindexing the catalog. The course schema declares the field as 'CPS Int Field'.
1720                    semester = str(course.semester)
1721                else:
1722                    semester = ''
1723                if semester == '1':
1724                    counter1 += 1
[4302]1725                    yga1 += om
1726                    yg1.append(om)
[4036]1727                elif semester == '2':
1728                    counter2 += 1
[4302]1729                    yga2 += om     
1730                    yg2.append(om)   
[4036]1731                elif semester == '3':
1732                    counter3 += 1
[4302]1733                    yga3 += om
1734                    yg3.append(om)
[4036]1735            except:
1736                continue               
1737        if counter1:
1738            yga1 /= counter1
1739            yga1 = '%.2f' % yga1   
1740        if counter2:
1741            yga2 /= counter2
1742            yga2 = '%.2f' % yga2   
1743        if counter3:
1744            yga3 /= counter3
1745            yga3 = '%.2f' % yga3                                   
[4302]1746        return yga1, yga2, yga3, counter1, counter2, counter3, yg1, yg2, yg3
[4036]1747    ###)
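    # Worked example (made-up marks): two first-semester results with overall marks 40.0
    # and 60.0 and one second-semester result with 55.0 yield
    #   yga1 = '50.00', counter1 = 2, yg1 = [40.0, 60.0]
    #   yga2 = '55.00', counter2 = 1, yg2 = [55.0]
    #   yga3 = 0,       counter3 = 0, yg3 = []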
1748   
[4302]1749   
1750    #security.declarePublic("calculateCoursePosition")
1751    def calculateCoursePosition(self,session_id,level_id,code,score,semester=None):
1752        #"""calculate Course Position"""
1753        query = Eq('session_id',session_id) & Eq('level_id',level_id) & Eq('code',code)
1754        course_results = self.course_results.evalAdvancedQuery(query)
1755        ygc = []
1756        #import pdb;pdb.set_trace() 
1757        for brain in course_results:
1758            try:
1759                if not float(brain.ca1) + float(brain.ca2) + float(brain.exam) > 0:
1760                    continue
1761                #code = brain.code   
1762                if semester:
1763                    res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1764                    if res:
1765                        course = res[0]
1766                        # The courses_catalog contains strings and integers in its semester field.
1767                        # Maybe this can be fixed by reindexing the catalog. The course schema declares the field as 'CPS Int Field'.
1768                        semester_from_course = str(course.semester)
1769                    else:
1770                        continue
1771                    if semester != semester_from_course:
1772                        continue
1773                ygc.append(float(brain.ca1) + float(brain.ca2) + float(brain.exam))
1774            except:
1775                continue     
1776        ygc.sort(reverse=True)
1777        if not len(ygc):
1778            return 'no result'
1779        #import pdb;pdb.set_trace()       
1780        for pos in range(len(ygc)):
1781            if ygc[pos] <= float(score):
1782                break
1783        output = {}   
1784        output['pos'] =  '%d of %d' % (pos+1,len(ygc))
1785        output['ygc'] = ygc
1786        return output
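        # Worked example (made-up marks): for year-group scores [70.0, 65.0, 50.0] and
        # score = 65, the loop stops at the second entry and the method returns
        #   {'pos': '2 of 3', 'ygc': [70.0, 65.0, 50.0]}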
1787       
1788    security.declareProtected(ModifyPortalContent,"calculateAllCoursePositions")
1789    def calculateAllCoursePositions(self,session_id=None):
1790        """calculate All Course Positions"""
[4304]1791        logger = logging.getLogger('WAeUPTables.CourseResults.calculateAllCoursePositions')
[4306]1792        member = self.portal_membership.getAuthenticatedMember()
[4304]1793        logger.info('%s starts recalculation of positions in session %s' % (member,session_id))
[4302]1794        if session_id:
1795            query = Eq('session_id',session_id)
1796        else:
1797            return 'no session_id provided'
1798        course_results = self.course_results.evalAdvancedQuery(query)
1799        for brain in course_results:
1800            try:
1801                if not float(brain.ca1) + float(brain.ca2) + float(brain.exam) > 0:
[5146]1802                    data = {}
1803                    data[self.key] = brain.key
1804                    data['pic'] = ''
1805                    self.modifyRecord(**data)                   
[4302]1806                    continue
1807                res = self.courses_catalog.evalAdvancedQuery(Eq('code',brain.code))
1808                if res:
1809                    course = res[0]
1810                    semester_from_course = str(course.semester)
1811                else:
1812                    continue                   
1813                score = float(brain.ca1) + float(brain.ca2) + float(brain.exam)
[5143]1814                pic = self.calculateCoursePosition(session_id,brain.level_id,brain.code,score,semester_from_course)['pos']
[4302]1815                data = {}
1816                data[self.key] = brain.key
1817                data['pic'] = pic
1818                self.modifyRecord(**data)
1819            except:
[5146]1820                data = {}
1821                data[self.key] = brain.key
1822                data['pic'] = ''
1823                self.modifyRecord(**data)
[4304]1824                continue       
1825        logger.info('recalculation finished')             
[4302]1826        return 'ready'   
1827   
[3988]1828    def exportRemoveAllCourses(self,student_id,export=False,remove=False): ###(
[3841]1829        "" # export and/or remove all course results of the given student
1830        query = Eq('student_id',student_id)
1831        cr_catalog = self.course_results
1832        course_results = cr_catalog.evalAdvancedQuery(query)
1833        courses = []
1834        fields = self.schema()
1835        format = '"%(' + ')s","%('.join(fields) + ')s"'
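        # The line above builds a row template such as '"%(student_id)s","%(level_id)s",...'
        # (field names are illustrative, the real ones come from self.schema()); rendering a
        # result dict through it yields one double-quoted CSV line, e.g. "A123456","100","MTH101".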
1836        for brain in course_results:
1837            d = {}
1838            for field in fields:
1839                d[field] = getattr(brain,field,'')
1840            courses.append(format % d)
1841               
1842        if export:
1843            export_file = "%s/export/course_results_removed.csv" % (i_home)
[3989]1844            if not os.path.exists(export_file): 
1845                file_handler = open(export_file,"a")
1846                headline = ','.join(fields)
1847                file_handler.write(headline +'\n')
1848            else:
1849                file_handler = open(export_file,"a")
[3841]1850            for line in courses:
1851                file_handler.write(line +'\n')
[3602]1852
[3841]1853        if remove:
1854            for brain in course_results:
1855                key = getattr(brain,'key','')
1856                cr_catalog.deleteRecord(key)
1857       
1858        return courses
1859    ###)   
[3984]1860   
1861   
[3841]1862
[3602]1863InitializeClass(CourseResults)
1864###)
1865
[1625]1866class OnlinePaymentsImport(WAeUPTable): ###(
[1620]1867
1868    meta_type = 'WAeUP Online Payment Transactions'
[1625]1869    name = "online_payments_import"
[1620]1870    key = "order_id"
[2094]1871    def __init__(self,name=None):
1872        if name ==  None:
1873            name = self.name
1874        WAeUPTable.__init__(self, name)
[1620]1875
1876
[2069]1877InitializeClass(OnlinePaymentsImport)
[1620]1878###)
1879
[1151]1880class ReturningImport(WAeUPTable): ###(
[1146]1881
[1151]1882    meta_type = 'Returning Import Table'
1883    name = "returning_import"
[1146]1884    key = "matric_no"
[2094]1885    def __init__(self,name=None):
1886        if name ==  None:
1887            name = self.name
1888        WAeUPTable.__init__(self, name)
[1146]1889
1890
[1151]1891InitializeClass(ReturningImport)
1892###)
[1146]1893
1894class ResultsImport(WAeUPTable): ###(
1895
1896    meta_type = 'Results Import Table'
1897    name = "results_import"
1898    key = "key"
[2094]1899    def __init__(self,name=None):
1900        if name ==  None:
1901            name = self.name
1902        WAeUPTable.__init__(self, name)
[1146]1903
1904
1905InitializeClass(ResultsImport)
1906
1907###)
1908
1909class PaymentsCatalog(WAeUPTable): ###(
[2738]1910    security = ClassSecurityInfo()
[1146]1911
1912    meta_type = 'WAeUP Payments Catalog'
[2868]1913    name = "payments_catalog"
1914    key = "order_id"
[2094]1915    def __init__(self,name=None):
1916        if name ==  None:
1917            name = self.name
1918        WAeUPTable.__init__(self, name)
[1146]1919
[2859]1920
[2738]1921    security.declarePrivate('notify_event_listener') ###(
1922    def notify_event_listener(self,event_type,object,infos):
1923        "listen for events"
1924        if not infos.has_key('rpath'):
1925            return
1926        pt = getattr(object,'portal_type',None)
1927        mt = getattr(object,'meta_type',None)
1928        data = {}
[2904]1929        if pt != 'Payment':
1930            return
[3469]1931        if event_type == 'sys_del_object' and mt == 'CPS Proxy Folder':
1932            self.deleteRecord(object.getContent().order_id)
[2904]1933        if mt == 'CPS Proxy Folder':
[2911]1934            return # is handled only for the real object
[2738]1935        if event_type not in ('sys_modify_object',): # one-element tuple, not a substring test
1936            return
1937        for field in self.schema():
[2859]1938            data[field] = getattr(object,field,'')
[2738]1939        rpl = infos['rpath'].split('/')
[2904]1940        #import pdb;pdb.set_trace()
1941        student_id = rpl[-4]
[2738]1942        data['student_id'] = student_id
[2907]1943        modified = False
[2859]1944        try:
1945            self.modifyRecord(**data)
[2907]1946            modified = True
[2859]1947        except KeyError:
[2926]1948            #logger = logging.getLogger('WAeUPTables.PaymentsCatalog.%s' % self.__name__)
1949            #logger.info("could not modify entry for %(student_id)s with %(order_id)s" % data)
1950            pass
[2907]1951        if not modified:
1952            try:
1953                self.addRecord(**data)
1954            except:
[2976]1955                logger = logging.getLogger('WAeUPTables.PaymentsCatalog.notify_event_listener')
[2907]1956                logger.info("could not add or modify entry for %(student_id)s with %(order_id)s" % data)
1957        ###)
[1146]1958
[2738]1959
[3988]1960    def exportRemoveAllPayments(self,student_id,export=False,remove=False): ###(
[3984]1961        ""
1962        query = Eq('student_id',student_id)
1963        pm_catalog = self.payments_catalog
1964        payments = pm_catalog.evalAdvancedQuery(query)
1965        payments_dic = []
1966        fields = self.schema()
1967        format = '"%(' + ')s","%('.join(fields) + ')s"'
1968        for brain in payments:
1969            d = {}
1970            for field in fields:
1971                d[field] = getattr(brain,field,'')
1972            payments_dic.append(format % d)
1973               
1974        if export:
1975            export_file = "%s/export/payments_removed.csv" % (i_home)
[3989]1976            if not os.path.exists(export_file): 
1977                file_handler = open(export_file,"a")
1978                headline = ','.join(fields)
1979                file_handler.write(headline +'\n')
1980            else:
1981                file_handler = open(export_file,"a")
[3984]1982            for line in payments_dic:
1983                file_handler.write(line +'\n')
1984
1985        if remove:
1986            for brain in payments:
1987                order_id = getattr(brain,'order_id','')
1988                pm_catalog.deleteRecord(order_id)
1989       
1990        return payments_dic
1991    ###)   
1992
[1146]1993InitializeClass(PaymentsCatalog)
1994
1995###)
1996
[4302]1997class RemovedStudentIds(WAeUPTable): ###(
1998
1999    meta_type = 'WAeUP Removed StudentIds'
2000    name = "removed_student_ids"
2001    key = "id"
2002    def __init__(self,name=None):
2003        if name ==  None:
2004            name = self.name
2005        WAeUPTable.__init__(self, name)
2006
2007
2008InitializeClass(RemovedStudentIds)
2009
2010###)
2011
[414]2012# BBB: backward-compatibility alias for the old, misspelled class name
2013AccomodationTable = AccommodationTable