source: WAeUP_SRP/trunk/WAeUPTables.py @ 5201

Last change on this file since 5201 was 5200, checked in by Henrik Bettermann, 15 years ago

add metadata (not indexes!) date_of_birth and marit_stat to students_catalog
fillMetadata.py: tool to fill metadata and/or reindex the students_catalog

  • Property svn:keywords set to Id
File size: 75.2 KB
[966]1#-*- mode: python; mode: fold -*-
[363]2# (C) Copyright 2005 AixtraWare <http://aixtraware.de>
3# Author: Joachim Schmitz <js@aixtraware.de>
4#
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License version 2 as published
7# by the Free Software Foundation.
8#
9# This program is distributed in the hope that it will be useful,
10# but WITHOUT ANY WARRANTY; without even the implied warranty of
11# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12# GNU General Public License for more details.
13#
14# You should have received a copy of the GNU General Public License
15# along with this program; if not, write to the Free Software
16# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
17# 02111-1307, USA.
18#
19# $Id: WAeUPTables.py 5200 2010-05-20 10:08:17Z henrik $
20
21from zope.interface import implements
22from Globals import InitializeClass
23from Products.ZCatalog.ZCatalog import ZCatalog
[1620]24from Products.ZCatalog.ProgressHandler import ZLogHandler
[780]25from AccessControl import ClassSecurityInfo
26from Products.CMFCore.permissions import ModifyPortalContent
[2094]27from Products.CMFCore.utils import getToolByName
28from Products.CMFCore.CatalogTool import CatalogTool
29from Products.CPSSchemas.StorageAdapter import MappingStorageAdapter
30from Products.CPSSchemas.DataStructure import DataStructure
31from Products.CPSSchemas.DataModel import DataModel
32from Products.AdvancedQuery import Eq, Between, Le,In
[1700]33import urllib
[1620]34import DateTime,time
[3989]35import csv,re,os
[780]36import logging
37import Globals
38p_home = Globals.package_home(globals())
39i_home = Globals.INSTANCE_HOME
40
[2084]41ADDING_SHEDULED = "adding_sheduled"
42OBJECT_CREATED = "object_created"
[2845]43NOT_OCCUPIED = 'not_occupied'
[2084]44
[363]45from interfaces import IWAeUPTable
46
47class AttributeHolder(object):
48    pass
49
50def dict2ob(d):
51    ob = AttributeHolder()
52    for key, value in d.items():
53        setattr(ob, key, value)
54    return ob
55
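
# Illustration only (not part of the original code): dict2ob wraps a plain
# mapping in an object whose keys become attributes, which is the shape that
# catalog_object() expects as a metadata source below.  The sample values are
# hypothetical.
def _example_dict2ob():
    row = {'bed': 'A1_B2_C3', 'student': NOT_OCCUPIED, 'bed_type': 'male'}
    ob = dict2ob(row)
    # every key of the mapping is now readable as an attribute
    return ob.bed, ob.student, ob.bed_type
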
[1146]56class WAeUPTable(ZCatalog): ###(
[834]57
[363]58    implements(IWAeUPTable)
[780]59    security = ClassSecurityInfo()
[2094]60    meta_type = None
[2099]61
[2094]62    def __init__(self,name=None):
63        if name ==  None:
64            name = self.name
65        ZCatalog.__init__(self,name)
[2099]66
[2094]67    def refreshCatalog(self, clear=0, pghandler=None): ###(
[1620]68        """ don't refresh for a normal table """
69
70        if self.REQUEST and self.REQUEST.RESPONSE:
71            self.REQUEST.RESPONSE.redirect(
72              self.REQUEST.get('URL1') +
73              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20refresh%20not%20implemented')
74
[2094]75###)
76
77    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None): ###(
[1620]78        """ clears the whole enchilada """
[1986]79
[1916]80        #if REQUEST and RESPONSE:
81        #    RESPONSE.redirect(
82        #      URL1 +
83        #      '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Clearing%20disabled')
[1620]84
[1916]85        self._catalog.clear()
[1620]86        if REQUEST and RESPONSE:
87            RESPONSE.redirect(
88              URL1 +
[1916]89              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20cleared')
[1620]90
[2094]91###)
92
[4244]93    def record2dict(self,fields,record,index): ###(
[2189]94        d = {}
95        for key in fields:
96            v = getattr(record, key, None)
[3018]97            v_dump = v
[2192]98            if key == 'sex':
[3017]99                if v == True:
100                    v_dump = 'F'
[3018]101                elif v == False:
[3017]102                    v_dump = 'M'
103                d[key] = v_dump
[2192]104            elif v:
[4244]105                if index == 'translate':
106                    if key == 'lga':
107                        v_dump = self.portal_vocabularies.local_gov_areas.get(v)
108                        if not v_dump:
109                            v_dump = v
110                    elif key == 'aos':
111                        v_dump = self.portal_vocabularies.aos.get(v)
[3017]112                d[key] = v_dump
[2189]113            else:
114                d[key] = ''
115        return d
[2191]116
[2632]117###)
118
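    # Usage sketch (comments only, assuming a portal context): record2dict turns
    # one catalog brain into a CSV-ready dict.  The boolean 'sex' flag becomes
    # 'F'/'M', and with index == 'translate' the 'lga' and 'aos' values are
    # resolved through the portal vocabularies:
    #
    #   fields = self.schema()
    #   for record in self():                       # every brain in the table
    #       row = self.record2dict(fields, record, None)
    #       # or: row = self.record2dict(fields, record, 'translate')
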
[2094]119    def addRecord(self, **data): ###(
[502]120        # The uid is the value of the table's key field (e.g. "bed" for the accommodation table).
121        uid = data[self.key]
122        res = self.searchResults({"%s" % self.key : uid})
123        if len(res) > 0:
124            raise ValueError("A record with uid %s already exists" % uid)
125        self.catalog_object(dict2ob(data), uid=uid)
126        return uid
[834]127
[2094]128###)
129
[363]130    def deleteRecord(self, uid):
131        self.uncatalog_object(uid)
[834]132
[2738]133    def getRecordByKey(self,key):
134        if not key:
135            return None
136        res = self.evalAdvancedQuery(Eq(self.key,key))
137        if res:
138            return res[0]
139        return None
140
[502]141    def searchAndSetRecord(self, **data):
142        raise NotImplementedError
143
[2094]144    def modifyRecord(self, record=None, **data): ###(
[502]145        #records = self.searchResults(uid=uid)
146        uid = data[self.key]
[2069]147        if record is None:
148            records = self.searchResults({"%s" % self.key : uid})
149            if len(records) > 1:
150                # Can not happen, but anyway...
151                raise ValueError("More than one record with uid %s" % uid)
152            if len(records) == 0:
153                raise KeyError("No record for uid %s" % uid)
154            record = records[0]
[363]155        record_data = {}
156        for field in self.schema() + self.indexes():
157            record_data[field] = getattr(record, field)
158        # Add the updated data:
159        record_data.update(data)
160        self.catalog_object(dict2ob(record_data), uid)
161
[2094]162###)
163
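    # Usage sketch (comments only): the record lifecycle offered by this base
    # class, for any subclass instance 'table'; 'SOME_KEY' and 'A123456' are
    # hypothetical sample values.
    #
    #   uid = table.addRecord(**{table.key: 'SOME_KEY', 'student': ''})
    #   record = table.getRecordByKey('SOME_KEY')    # brain or None
    #   table.modifyRecord(**{table.key: 'SOME_KEY', 'student': 'A123456'})
    #   table.deleteRecord('SOME_KEY')
    #
    # addRecord raises ValueError if the key is already catalogued, and
    # modifyRecord raises KeyError if the key cannot be found.
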
164    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
[1062]165        if isinstance(name, str):
[2094]166            name =  (name,)
[1062]167        paths = self._catalog.uids.items()
168        i = 0
169        #import pdb;pdb.set_trace()
170        for p,rid in paths:
171            i += 1
172            metadata = self.getMetadataForRID(rid)
173            record_data = {}
174            for field in name:
175                record_data[field] = metadata.get(field)
176            uid = metadata.get(self.key)
177            self.catalog_object(dict2ob(record_data), uid, idxs=name,
178                                update_metadata=0)
[1082]179
[2094]180###)
181
182    security.declareProtected(ModifyPortalContent,"exportAllRecords") ###(
[780]183    def exportAllRecords(self):
184        "export a WAeUPTable"
185        #import pdb;pdb.set_trace()
186        fields = [field for field in self.schema()]
187        format = ','.join(['"%%(%s)s"' % fn for fn in fields])
188        csv = []
189        csv.append(','.join(['"%s"' % fn for fn in fields]))
190        for uid in self._catalog.uids:
191            records = self.searchResults({"%s" % self.key : uid})
192            if len(records) > 1:
193                # Can not happen, but anyway...
194                raise ValueError("More than one record with uid %s" % uid)
195            if len(records) == 0:
196                raise KeyError("No record for uid %s" % uid)
197            rec = records[0]
198            csv.append(format % rec)
199        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
200        open("%s/import/%s-%s.csv" % (i_home,self.getId(),current),"w+").write('\n'.join(csv))
[2094]201
202###)
203
[2189]204    security.declareProtected(ModifyPortalContent,"dumpAll")###(
[3757]205    def dumpAll(self,index=None,value=None):
[2189]206        """dump all data in the table to a csv"""
207        member = self.portal_membership.getAuthenticatedMember()
[2974]208        logger = logging.getLogger('WAeUPTables.WAeUPTable.dumpAll')
[2189]209        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
210        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
211        res_list = []
212        lines = []
213        if hasattr(self,"export_keys"):
214            fields = self.export_keys
215        else:
216            fields = []
217            for f in self.schema():
218                fields.append(f)
219        headline = ','.join(fields)
220        out = open(export_file,"wb")
221        out.write(headline +'\n')
222        out.close()
223        out = open(export_file,"a")
224        csv_writer = csv.DictWriter(out,fields,)
[3757]225        if index is not None and value is not None:
226            records = self.evalAdvancedQuery(Eq(index,value))
227        else:
228            records = self()
[2189]229        nr2export = len(records)
230        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
231        chunk = 2000
232        total = 0
233        start = DateTime.DateTime().timeTime()
234        start_chunk = DateTime.DateTime().timeTime()
235        for record in records:
236            not_all = False
[4244]237            d = self.record2dict(fields,record,index)
[2189]238            lines.append(d)
239            total += 1
240            if total and not total % chunk or total == len(records):
241                csv_writer.writerows(lines)
242                anz = len(lines)
243                logger.info("wrote %(anz)d  total written %(total)d" % vars())
244                end_chunk = DateTime.DateTime().timeTime()
245                duration = end_chunk-start_chunk
246                per_record = duration/anz
247                till_now = end_chunk - start
248                average_per_record = till_now/total
249                estimated_end = DateTime.DateTime(start + average_per_record * nr2export)
250                estimated_end = estimated_end.strftime("%H:%M:%S")
251                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
252                start_chunk = DateTime.DateTime().timeTime()
253                lines = []
254        end = DateTime.DateTime().timeTime()
255        logger.info('total time %6.2f m' % ((end-start)/60))
256        import os
257        filename, extension = os.path.splitext(export_file)
258        from subprocess import call
259        msg = "wrote %(total)d records to %(export_file)s" % vars()
[2561]260        #try:
261        #    retcode = call('gzip %s' % (export_file),shell=True)
262        #    if retcode == 0:
263        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
264        #except OSError, e:
265        #    retcode = -99
266        #    logger.info("zip failed with %s" % e)
[2189]267        logger.info(msg)
268        args = {'portal_status_message': msg}
269        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
270        url = self.REQUEST.get('URL2')
[4537]271        return 'ready'
272        #return self.REQUEST.RESPONSE.redirect(url)
[2189]273    ###)
274
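    # Usage sketch (comments only): dumpAll writes every record, or only the
    # records matching one index/value pair, to
    # <INSTANCE_HOME>/export/<table name>_<timestamp>.csv:
    #
    #   self.students_catalog.dumpAll()                  # full dump
    #   self.students_catalog.dumpAll('session', '08')   # filtered dump
    #
    # The 'session' index and the '08' value are hypothetical examples.
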
[2185]275    security.declarePrivate("_import_old") ###(
276    def _import_old(self,filename,schema,layout, mode,logger):
[2094]277        "import data from csv"
278        import transaction
279        import random
280        pm = self.portal_membership
281        member = pm.getAuthenticatedMember()
282        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
283        import_fn = "%s/import/%s.csv" % (i_home,filename)
284        imported_fn = "%s/import/%s_imported%s.csv" % (i_home,filename,current)
285        not_imported_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
286        start = True
287        tr_count = 1
288        total_imported = 0
289        total_not_imported = 0
290        total = 0
291        iname =  "%s" % filename
[2112]292        not_imported = []
293        imported = []
294        valid_records = []
295        invalid_records = []
296        d = {}
297        d['mode'] = mode
298        d['imported'] = total_imported
299        d['not_imported'] = total_not_imported
300        d['valid_records'] = valid_records
301        d['invalid_records'] = invalid_records
302        d['import_fn'] = import_fn
303        d['imported_fn'] = imported_fn
304        d['not_imported_fn'] = not_imported_fn
[2094]305        if schema is None:
306            em = 'No schema specified'
307            logger.error(em)
[2112]308            return d
[2094]309        if layout is None:
310            em = 'No layout specified'
311            logger.error(em)
[2112]312            return d
[2094]313        validators = {}
314        for widget in layout.keys():
[2112]315            try:
316                validators[widget] = layout[widget].validate
317            except AttributeError:
318                logger.info('%s has no validate attribute' % widget)
319                return d
[2094]320        # if mode == 'edit':
321        #     importer = self.importEdit
322        # elif mode == 'add':
323        #     importer = self.importAdd
324        # else:
325        #     importer = None
326        try:
[2185]327            items = csv.DictReader(open(import_fn,"rb"),
328                                   dialect="excel",
329                                   skipinitialspace=True)
[2094]330        except:
331            em = 'Error reading %s.csv' % filename
332            logger.error(em)
333            return d
[2185]334        #import pdb;pdb.set_trace()
[2094]335        for item in items:
336            if start:
337                start = False
338                logger.info('%s starts import from %s.csv' % (member,filename))
339                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
[2185]340                attrs = csv.reader(open("%s/import/%s.csv" % (i_home,filename),"rb"),
341                                   dialect="excel",
342                                   skipinitialspace=True).next()
[2094]343                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
344                diff2schema = set(import_keys).difference(set(schema.keys()))
345                diff2layout = set(import_keys).difference(set(layout.keys()))
346                if diff2layout:
347                    em = "non-ignorable key(s) %s found in heading" % diff2layout
348                    logger.info(em)
349                    return d
350                s = ','.join(['"%s"' % fn for fn in import_keys])
351                open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
352                #s = '"id",' + s
353                open(imported_fn,"a").write(s + '\n')
354                format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
355                format_error = format + ',"%(Error)s"'
356                #format = '"%(id)s",'+ format
357                adapters = [MappingStorageAdapter(schema, item)]
358            dm = DataModel(item, adapters,context=self)
359            ds = DataStructure(data=item,datamodel=dm)
360            error_string = ""
[2503]361            #import pdb;pdb.set_trace()
[2094]362            for k in import_keys:
363                if not validators[k](ds,mode=mode):
364                    error_string += " %s : %s" % (k,ds.getError(k))
365            # if not error_string and importer:
366            #     item.update(dm)
367            #     item['id'],error = importer(item)
368            #     if error:
369            #         error_string += error
370            if error_string:
371                item['Error'] = error_string
372                invalid_records.append(dm)
373                not_imported.append(format_error % item)
374                total_not_imported += 1
375            else:
376                em = format % item
377                valid_records.append(dm)
378                imported.append(em)
379                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
380                tr_count += 1
381                total_imported += 1
382            total += 1
383        if len(imported) > 0:
384            open(imported_fn,"a").write('\n'.join(imported))
385        if len(not_imported) > 0:
386            open(not_imported_fn,"a").write('\n'.join(not_imported))
387        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
388        d['imported'] = total_imported
389        d['not_imported'] = total_not_imported
390        d['valid_records'] = valid_records
391        d['invalid_records'] = invalid_records
392        d['imported_fn'] = imported_fn
393        d['not_imported_fn'] = not_imported_fn
394        #logger.info(em)
395        return d
[1935]396    ###)
[2185]397
[2185]398    security.declarePrivate("_import_new") ###(
399    def _import_new(self,csv_items,schema, layout, mode,logger):
400        "import data from a csv.DictReader instance"
401        start = True
402        tr_count = 1
403        total_imported = 0
404        total_not_imported = 0
405        total = 0
407        not_imported = []
408        valid_records = []
409        invalid_records = []
410        duplicate_records = []
411        d = {}
412        d['mode'] = mode
413        d['valid_records'] = valid_records
414        d['invalid_records'] = invalid_records
415        d['duplicate_records'] = duplicate_records
416        # d['import_fn'] = import_fn
417        # d['imported_fn'] = imported_fn
418        # d['not_imported_fn'] = not_imported_fn
419        validators = {}
420        for widget in layout.keys():
421            try:
422                validators[widget] = layout[widget].validate
423            except AttributeError:
424                logger.info('%s has no validate attribute' % widget)
425                return d
426        for item in csv_items:
427            if start:
428                start = False
429                member = self.portal_membership.getAuthenticatedMember()
                logger.info('%s starts import' % member)
430                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
431                attrs = csv_items.fieldnames
432                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
433                diff2schema = set(import_keys).difference(set(schema.keys()))
434                diff2layout = set(import_keys).difference(set(layout.keys()))
435                if diff2layout:
436                    em = "non-ignorable key(s) %s found in heading" % diff2layout
437                    logger.info(em)
438                    return d
439                # s = ','.join(['"%s"' % fn for fn in import_keys])
440                # open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
441                # #s = '"id",' + s
442                # open(imported_fn,"a").write(s + '\n')
443                # format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
444                # format_error = format + ',"%(Error)s"'
445                # #format = '"%(id)s",'+ format
446                adapters = [MappingStorageAdapter(schema, item)]
447            dm = DataModel(item, adapters,context=self)
448            ds = DataStructure(data=item,datamodel=dm)
449            error_string = ""
450            for k in import_keys:
451                if not validators[k](ds,mode=mode):
452                    error_string += " %s : %s" % (k,ds.getError(k))
453            if error_string:
454                item['Error'] = error_string
455                #invalid_records.append(dm)
456                invalid_records.append(item)
457                total_not_imported += 1
458            else:
460                valid_records.append(dm)
461                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
462                tr_count += 1
463                total_imported += 1
464            total += 1
465        # if len(imported) > 0:
466        #     open(imported_fn,"a").write('\n'.join(imported))
467        # if len(not_imported) > 0:
468        #     open(not_imported_fn,"a").write('\n'.join(not_imported))
469        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
470        d['imported'] = total_imported
471        d['not_imported'] = total_not_imported
472        d['valid_records'] = valid_records
473        d['invalid_records'] = invalid_records
474        return d
475    ###)
476
[2396]477    security.declarePublic("missingValue")###(
478    def missingValue(self):
479        from Missing import MV
480        return MV
481    ###)
[2094]482###)
[834]483
[1146]484class AccommodationTable(WAeUPTable): ###(
[834]485
[404]486    meta_type = 'WAeUP Accommodation Tool'
[2094]487    name = "portal_accommodation"
[502]488    key = "bed"
[3043]489    not_occupied = NOT_OCCUPIED
[2094]490    def __init__(self,name=None):
491        if name ==  None:
492            name = self.name
493        WAeUPTable.__init__(self, name)
[2866]494
[3772]495    def searchAndReserveBed(self, student_id,bed_type,random_order=False): ###(
[3406]496        logger = logging.getLogger('WAeUPTables.AccommodationTable.searchAndReserveBed')
[2845]497        records = self.evalAdvancedQuery(Eq('student',student_id))
[3406]498        if len(records) == 1:
499            #return -1,"Student with Id %s already booked bed %s." % (student_id,records[0].bed)
500            logger.info('%s found (reserved) bed %s' % (student_id,records[0].bed))
501            return -1,records[0].bed
502        elif len(records) > 1:
503            logger.info('%s found more than one (reserved) bed' % (student_id))
[3408]504            return -3,'more than one bed'
[2845]505        query = Eq('bed_type',bed_type) & Eq('student',NOT_OCCUPIED)
506        records = self.evalAdvancedQuery(query,sortSpecs=('sort_id','bed'))
[635]507        if len(records) == 0:
[3408]508            logger.info('no bed %s available for %s' % (bed_type,student_id))
[3406]509            return -2,"no bed"
[3772]510        if random_order:
511            import random
512            bed_no = random.randint(0,len(records)-1)
513        else:
514            bed_no = 0
515        rec = records[bed_no]
[635]516        self.modifyRecord(bed=rec.bed,student=student_id)
[3406]517        logger.info('%s booked bed %s' % (student_id,rec.bed))
[635]518        return 1,rec.bed
[3043]519    ###)
[363]520
[834]521
[404]522InitializeClass(AccommodationTable)
[411]523
[1146]524###)
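
# Usage sketch (illustration only, not part of the original code): how a caller
# can interpret the status codes returned by searchAndReserveBed.  The
# 'accommodation' argument is assumed to be the portal_accommodation tool and
# the bed_type value is hypothetical.
def _example_reserve_bed(accommodation, student_id, bed_type='male'):
    code, bed = accommodation.searchAndReserveBed(student_id, bed_type)
    if code == 1:
        return "booked bed %s" % bed          # a free bed was reserved
    elif code == -1:
        return "already booked bed %s" % bed  # student holds a bed already
    elif code == -2:
        return "no bed of this type left"     # bed == "no bed"
    return "more than one bed found"          # code == -3, data needs cleanup
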
525
526class PinTable(WAeUPTable): ###(
[1030]527    from ZODB.POSException import ConflictError
[2973]528    security = ClassSecurityInfo()
[440]529    meta_type = 'WAeUP Pin Tool'
[2094]530    name = "portal_pins"
[502]531    key = 'pin'
[2859]532
[2094]533    def __init__(self,name=None):
534        if name ==  None:
535            name = self.name
536        WAeUPTable.__init__(self, name)
[1082]537
[2973]538    security.declareProtected(ModifyPortalContent,"dumpAll")###(
[4546]539    def dumpAll(self,include_unused=None,index=None):
[2973]540        """dump all data in the table to a csv"""
541        member = self.portal_membership.getAuthenticatedMember()
[2974]542        logger = logging.getLogger('WAeUPTables.PinTable.dumpAll')
[2973]543        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
544        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
545        res_list = []
546        lines = []
547        if hasattr(self,"export_keys"):
548            fields = self.export_keys
549        else:
550            fields = []
551            for f in self.schema():
552                fields.append(f)
553        headline = ','.join(fields)
554        out = open(export_file,"wb")
555        out.write(headline +'\n')
556        out.close()
557        out = open(export_file,"a")
558        csv_writer = csv.DictWriter(out,fields,)
559        if include_unused is not None and str(member) not in ('admin','joachim'):
560            logger.info('%s tries to dump pintable with unused pins' % (member))
561            return
562        if include_unused is not None:
563            records = self()
564        else:
565            records = self.evalAdvancedQuery(~Eq('student',''))
566        nr2export = len(records)
567        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
568        chunk = 2000
569        total = 0
570        start = DateTime.DateTime().timeTime()
571        start_chunk = DateTime.DateTime().timeTime()
572        for record in records:
573            not_all = False
[4546]574            d = self.record2dict(fields,record,index)
[2973]575            lines.append(d)
576            total += 1
577            if total and not total % chunk or total == len(records):
578                csv_writer.writerows(lines)
579                anz = len(lines)
580                logger.info("wrote %(anz)d  total written %(total)d" % vars())
581                end_chunk = DateTime.DateTime().timeTime()
582                duration = end_chunk-start_chunk
583                per_record = duration/anz
584                till_now = end_chunk - start
585                average_per_record = till_now/total
586                estimated_end = DateTime.DateTime(start + average_per_record * nr2export)
587                estimated_end = estimated_end.strftime("%H:%M:%S")
588                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
589                start_chunk = DateTime.DateTime().timeTime()
590                lines = []
591        end = DateTime.DateTime().timeTime()
592        logger.info('total time %6.2f m' % ((end-start)/60))
593        import os
594        filename, extension = os.path.splitext(export_file)
595        from subprocess import call
596        msg = "wrote %(total)d records to %(export_file)s" % vars()
597        #try:
598        #    retcode = call('gzip %s' % (export_file),shell=True)
599        #    if retcode == 0:
600        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
601        #except OSError, e:
602        #    retcode = -99
603        #    logger.info("zip failed with %s" % e)
604        logger.info(msg)
605        args = {'portal_status_message': msg}
606        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
607        url = self.REQUEST.get('URL2')
608        return self.REQUEST.RESPONSE.redirect(url)
609    ###)
[1082]610
[2973]611
612
[710]613    def searchAndSetRecord(self, uid, student_id,prefix):
[2191]614
615        # The following line must be activated after resetting the
616        # the portal_pins table. This is to avoid duplicate entries
[2184]617        # and disable duplicate payments.
[2191]618
[2184]619        #student_id = student_id.upper()
620
[2716]621        #records = self.searchResults(student = student_id)
[2579]622        #if len(records) > 0 and prefix in ('CLR','APP'):
623        #    for r in records:
624        #        if r.pin != uid and r.prefix_batch.startswith(prefix):
625        #            return -2
[502]626        records = self.searchResults({"%s" % self.key : uid})
627        if len(records) > 1:
628            # Can not happen, but anyway...
629            raise ValueError("More than one record with uid %s" % uid)
630        if len(records) == 0:
[2766]631            return -1,None
[502]632        record = records[0]
633        if record.student == "":
634            record_data = {}
635            for field in self.schema() + self.indexes():
636                record_data[field] = getattr(record, field)
637            # Add the updated data:
[635]638            record_data['student'] = student_id
[1030]639            try:
640                self.catalog_object(dict2ob(record_data), uid)
[2766]641                return 1,record
[1030]642            except ConflictError:
[2766]643                return 2,record
[990]644        if record.student.upper() != student_id.upper():
[2766]645            return 0,record
[997]646        if record.student.upper() == student_id.upper():
[2766]647            return 2,record
648        return -3,record
[440]649InitializeClass(PinTable)
[1146]650###)
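
# Usage sketch (illustration only): searchAndSetRecord ties a pin to a student
# and reports the outcome.  'pins' is assumed to be the portal_pins tool; the
# pin, student id and prefix values are hypothetical.
def _example_use_pin(pins, pin='APP1234567890', student_id='A123456', prefix='APP'):
    code, record = pins.searchAndSetRecord(pin, student_id, prefix)
    if code == -1:
        return "no such pin"                        # record is None here
    elif code == 0:
        return "pin already used by %s" % record.student
    elif code == 1:
        return "pin accepted"
    elif code == 2:
        return "pin already used by this student (or a write conflict occurred)"
    return "unexpected state"                       # code == -3
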
[966]651
[1146]652class PumeResultsTable(WAeUPTable): ###(
653
[966]654    meta_type = 'WAeUP PumeResults Tool'
[2094]655    name = "portal_pumeresults"
[966]656    key = "jamb_reg_no"
[2094]657    def __init__(self,name=None):
658        if name ==  None:
659            name = self.name
660        WAeUPTable.__init__(self, name)
[966]661
662
663InitializeClass(PumeResultsTable)
664
[1146]665###)
[971]666
[2094]667class ApplicantsCatalog(WAeUPTable): ###(
668
[2113]669    meta_type = 'WAeUP Applicants Catalog'
[2094]670    name = "applicants_catalog"
671    key = "reg_no"
672    security = ClassSecurityInfo()
[2537]673    #export_keys = (
674    #               "reg_no",
675    #               "status",
676    #               "lastname",
677    #               "sex",
678    #               "date_of_birth",
679    #               "lga",
680    #               "email",
681    #               "phone",
682    #               "passport",
683    #               "entry_mode",
684    #               "pin",
685    #               "screening_type",
686    #               "registration_date",
687    #               "testdate",
688    #               "application_date",
689    #               "screening_date",
690    #               "faculty",
691    #               "department",
692    #               "course1",
693    #               "course2",
694    #               "course3",
695    #               "eng_score",
696    #               "subj1",
697    #               "subj1score",
698    #               "subj2",
699    #               "subj2score",
700    #               "subj3",
701    #               "subj3score",
702    #               "aggregate",
703    #               "course_admitted",
704    #               )
[2632]705
[2094]706    def __init__(self,name=None):
707        if name ==  None:
708            name = self.name
709        WAeUPTable.__init__(self, name)
710
[2185]711    security.declareProtected(ModifyPortalContent,"new_importCSV")###(
712    def new_importCSV(self,filename="JAMB_data",
713                  schema_id="application",
[2503]714                  layout_id="import_application",
[2185]715                  mode='add'):
716        """ import JAMB data """
717        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
718        pm = self.portal_membership
719        member = pm.getAuthenticatedMember()
720        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
721        lock_fn = "%s/import/%s_import_lock" % (i_home,filename)
722        import_fn = "%s/import/%s.csv" % (i_home,filename)
723        if mode not in ('add','edit'):
724            logger.info("invalid mode: %s" % mode)
            return
725        if os.path.exists(lock_fn):
726            logger.info("import of %(import_fn)s already in progress" % vars())
727            return
728        lock_file = open(lock_fn,"w")
729        lock_file.write("%(current)s \n" % vars())
730        lock_file.close()
731        invalid_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
732        duplicate_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
733        stool = getToolByName(self, 'portal_schemas')
734        ltool = getToolByName(self, 'portal_layouts')
735        schema = stool._getOb(schema_id)
736        if schema is None:
737            em = 'No such schema %s' % schema_id
738            logger.error(em)
739            return
740        for postfix in ('_import',''):
741            layout_name = "%(layout_id)s%(postfix)s" % vars()
742            if hasattr(ltool,layout_name):
743                break
744        layout = ltool._getOb(layout_name)
745        if layout is None:
746            em = 'No such layout %s' % layout_id
747            logger.error(em)
748            return
749        try:
750            csv_items = csv.DictReader(open(import_fn,"rb"))
751        except:
752            em = 'Error reading %s.csv' % filename
753            logger.error(em)
[2191]754            return
[2185]755        d = self._import_new(csv_items,schema,layout,mode,logger)
756        imported = []
757        edited = []
758        duplicates = []
759        not_found = []
760        if len(d['valid_records']) > 0:
761            for record in d['valid_records']:
762                #import pdb;pdb.set_trace()
763                if mode == "add":
764                    try:
765                        self.addRecord(**dict(record.items()))
766                        imported.append(dict(record.items()))
767                        logger.info("added %s" % record.items())
768                    except ValueError:
769                        duplicates.append(dict(record.items()))
770                        logger.info("duplicate %s" % record.items())
771                elif mode == "edit":
772                    try:
773                        self.modifyRecord(**dict(record.items()))
774                        edited.append(dict(record.items()))
775                        logger.info("edited %s" % record.items())
776                    except KeyError:
777                        not_found.append(dict(record.items()))
778                        logger.info("not found %s" % record.items())
779        invalid = d['invalid_records']
780        for itype in ("imported","edited","not_found","duplicates","invalid"):
781            outlist = locals()[itype]
782            if len(outlist):
783                d = {}
784                for k in outlist[0].keys():
785                    d[k] = k
[2191]786                outlist[0] = d
[2185]787                outfile = open("file_name_%s" % itype,'w')
788                csv.DictWriter(outfile,outlist[0].keys()).writerows(outlist)
789                logger.info("wrote %d %s records" % (len(outlist),itype))
790###)
791
[2094]792    security.declareProtected(ModifyPortalContent,"importCSV")###(
793    def importCSV(self,filename="JAMB_data",
794                  schema_id="application",
[2508]795                  layout_id="application_pce",
[2094]796                  mode='add'):
797        """ import JAMB data """
798        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
        stool = getToolByName(self, 'portal_schemas')
799        ltool = getToolByName(self, 'portal_layouts')
800        schema = stool._getOb(schema_id)
801        if schema is None:
802            em = 'No such schema %s' % schema_id
803            logger.error(em)
804            return
805        layout = ltool._getOb(layout_id)
806        if layout is None:
807            em = 'No such layout %s' % layout_id
808            logger.error(em)
809            return
[2185]811        d = self._import_old(filename,schema,layout,mode,logger)
[2094]812        if len(d['valid_records']) > 0:
813            for record in d['valid_records']:
814                #import pdb;pdb.set_trace()
815                if mode == "add":
816                    self.addRecord(**dict(record.items()))
817                    logger.info("added %s" % record.items())
818                elif mode == "edit":
819                    self.modifyRecord(**dict(record.items()))
820                    logger.info("edited %s" % record.items())
821                else:
822                    logger.info("invalid mode: %s" % mode)
823        logger.info("%(mode)sed %(imported)d records, invalid written to %(not_imported_fn)s" % d)
[2632]824    ###)
[2094]825
826InitializeClass(ApplicantsCatalog)
827
828###)
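
# Usage sketch (illustration only): a typical call of the CSV importer defined
# above, e.g. from a Script (Python) in the portal.  'portal' is assumed to be
# the portal root and <INSTANCE_HOME>/import/JAMB_data.csv must exist; the
# argument values are the method's own defaults.
def _example_import_applicants(portal):
    portal.applicants_catalog.importCSV(filename="JAMB_data",
                                        schema_id="application",
                                        layout_id="application_pce",
                                        mode="add")
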
829
[1146]830class StudentsCatalog(WAeUPTable): ###(
[1620]831    security = ClassSecurityInfo()
[1146]832
[971]833    meta_type = 'WAeUP Students Catalog'
834    name = "students_catalog"
835    key = "id"
[1700]836    affected_types = {   ###(
[1749]837                      'StudentApplication':
[2069]838                      {'id': 'application',
839                       'fields':
840                       ('jamb_reg_no',
841                        'entry_mode',
842                        #'entry_level',
843                        'entry_session',
844                       )
845                      },
[1700]846                      'StudentClearance':
[2069]847                      {'id': 'clearance',
848                       'fields':
849                       ('matric_no',
850                        'lga',
[5200]851                        'date_of_birth',  # birthday
[2069]852                       )
853                      },
854                      'StudentPersonal':
855                      {'id': 'personal',
856                       'fields':
857                       ('name',
858                        'sex',
859                        'perm_address',
860                        'email',
861                        'phone',
[5200]862                        'marit_stat',
[2069]863                       )
864                      },
865                      'StudentStudyCourse':
866                      {'id': 'study_course',
867                       'fields':
868                       ('course', # study_course
869                        'faculty', # from certificate
870                        'department', # from certificate
871                        'end_level', # from certificate
872                        'level', # current_level
[3780]873                        'mode',  # from certificate
[2069]874                        'session', # current_session
875                        'verdict', # current_verdict
876                       )
877                      },
878                     }
[1700]879    ###)
[1625]880
[2094]881    def __init__(self,name=None):
882        if name ==  None:
883            name = self.name
884        WAeUPTable.__init__(self, name)
[1620]885        return
[1625]886
[1700]887    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None):
888        """ clears the whole enchilada """
889        self._catalog.clear()
[971]890
[1700]891        if REQUEST and RESPONSE:
892            RESPONSE.redirect(
893              URL1 +
894              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Cleared')
[971]895
[1700]896    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
897        """ clear the catalog, then re-index everything """
898
899        elapse = time.time()
900        c_elapse = time.clock()
901
902        pgthreshold = self._getProgressThreshold()
903        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
904        self.refreshCatalog(clear=1, pghandler=handler)
905
906        elapse = time.time() - elapse
907        c_elapse = time.clock() - c_elapse
908
909        RESPONSE.redirect(
910            URL1 +
911            '/manage_catalogAdvanced?manage_tabs_message=' +
912            urllib.quote('Catalog Updated \n'
913                         'Total time: %s\n'
914                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
915    ###)
916
[2084]917    def fill_certificates_dict(self): ###(
[2078]918        "return certificate data in  dict"
919        certificates_brains = self.portal_catalog(portal_type ='Certificate')
920        d = {}
921        for cb in certificates_brains:
922            certificate_doc = cb.getObject().getContent()
923            cb_path = cb.getPath().split('/')
924            ld = {}
925            ld['faculty'] = cb_path[-4]
926            ld['department'] = cb_path[-3]
927            ld['end_level'] = getattr(certificate_doc,'end_level','999')
[3348]928            ld['study_mode'] = getattr(certificate_doc,'study_mode','')
[2078]929            d[cb.getId] = ld
930        return d
[2084]931    ###)
932
[2078]933    def get_from_doc_department(self,doc,cached_data={}): ###(
[1620]934        "return the students department"
[1700]935        if doc is None:
[1620]936            return None
[3354]937        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
[3348]938            return self._v_certificates[doc.study_course]['department']
[1700]939        certificate_res = self.portal_catalog(id = doc.study_course)
[1620]940        if len(certificate_res) != 1:
941            return None
942        return certificate_res[0].getPath().split('/')[-3]
943
[2078]944    def get_from_doc_faculty(self,doc,cached_data={}):
[1700]945        "return the students faculty"
946        if doc is None:
[1620]947            return None
[3354]948        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
[3348]949            return self._v_certificates[doc.study_course]['faculty']
[1700]950        certificate_res = self.portal_catalog(id = doc.study_course)
951        if len(certificate_res) != 1:
952            return None
953        return certificate_res[0].getPath().split('/')[-4]
[1620]954
[2099]955    def get_from_doc_end_level(self,doc,cached_data={}):
[2069]956        "return the students end_level"
957        if doc is None:
958            return None
[3354]959        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
[3348]960            return self._v_certificates[doc.study_course]['end_level']
[2069]961        certificate_res = self.portal_catalog(id = doc.study_course)
962        if len(certificate_res) != 1:
963            return None
964        return getattr(certificate_res[0].getObject().getContent(),'end_level','unknown')
965
[2078]966    def get_from_doc_level(self,doc,cached_data={}):
[1700]967        "return the students level"
968        if doc is None:
[1620]969            return None
[1700]970        return getattr(doc,'current_level',None)
[1620]971
[3780]972    #def get_from_doc_mode(self,doc,cached_data={}):
973    #    "return the students mode"
974    #    if doc is None:
975    #        return None
976    #    cm = getattr(doc,'current_mode',None)
977    #    return cm
978   
[2078]979    def get_from_doc_mode(self,doc,cached_data={}):
[1705]980        "return the students mode"
[1700]981        if doc is None:
[1620]982            return None
[3780]983        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
984            return self._v_certificates[doc.study_course]['study_mode']
985        certificate_res = self.portal_catalog(id = doc.study_course)
986        if len(certificate_res) != 1:
987            return None
988        return getattr(certificate_res[0].getObject().getContent(),'study_mode','unknown')   
[1625]989
[1749]990
[5200]991    def get_from_doc_marit_stat(self,doc,cached_data={}):
992        "return the students marit_stat"
993        if doc is None:
994            return None
995        ms = getattr(doc,'marit_stat',None)
996        if ms == True:
997            return 'married'
998        elif ms == False:
999            return 'single'
1000        else:
1001            return 'undefined'
1002           
1003    def get_from_doc_date_of_birth(self,doc,cached_data={}):
1004        "return the students date of birth"
1005        if doc is None:
1006            return None
1007        return getattr(doc,'birthday',None)           
1008
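    # Sketch (comments only): the two accessors above feed the metadata columns
    # date_of_birth and marit_stat added in r5200.  During (re)indexing the
    # clearance document supplies the birthday and the personal document the
    # marital status, roughly:
    #
    #   clearance_doc = getattr(student_object, 'clearance').getContent()
    #   personal_doc = getattr(student_object, 'personal').getContent()
    #   dob = self.get_from_doc_date_of_birth(clearance_doc)  # e.g. '12/04/1985'
    #   ms = self.get_from_doc_marit_stat(personal_doc)       # 'married'/'single'/'undefined'
    #
    # The date format shown is a hypothetical example.
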
[2078]1009    def get_from_doc_session(self,doc,cached_data={}):
[1705]1010        "return the students current_session"
1011        if doc is None:
1012            return None
1013        return getattr(doc,'current_session',None)
1014
[2078]1015    def get_from_doc_entry_session(self,doc,cached_data={}):
[1700]1016        "return the students entry_session"
1017        if doc is None:
[1620]1018            return None
[1705]1019        es = getattr(doc,'entry_session',None)
[3958]1020        if es is not None and len(es) < 3:
[1705]1021            return es
[3743]1022        elif es is not None and len(es) == 9:
1023            return es[2:4]   
[1700]1024        try:
1025            digit = int(doc.jamb_reg_no[0])
1026        except:
[1986]1027            return "-1"
[3958]1028        if digit < 9:
[1700]1029            return "0%c" % doc.jamb_reg_no[0]
1030        return "9%c" % doc.jamb_reg_no[0]
1031
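    # Sketch (comments only): how get_from_doc_entry_session resolves the entry
    # session, with hypothetical sample values:
    #
    #   entry_session = '05'         -> '05'  (shorter than 3 chars, returned as is)
    #   entry_session = '2005/2006'  -> '05'  (characters [2:4] of a 9-char value)
    #   entry_session missing        -> derived from the first digit of
    #                                   jamb_reg_no: '6...' -> '06', '9...' -> '99'
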
[2078]1032    def get_from_doc_course(self,doc,cached_data={}):
[1620]1033        "return the students study_course"
[1700]1034        if doc is None:
[1620]1035            return None
[1700]1036        return getattr(doc,'study_course',None)
[1620]1037
[2078]1038    def get_from_doc_name(self,doc,cached_data={}):
[1620]1039        "return the students name from the personal"
[1700]1040        if doc is None:
[1620]1041            return None
1042        return "%s %s %s" % (doc.firstname,doc.middlename,doc.lastname)
1043
[2078]1044    def get_from_doc_verdict(self,doc,cached_data={}):
[1700]1045        "return the students study_course"
1046        if doc is None:
[1620]1047            return None
[1700]1048        return getattr(doc,'current_verdict',None)
[1702]1049    ###)
[1620]1050
[1702]1051    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
[3781]1052        if not hasattr(self,'_v_certificates'):
1053            self._v_certificates = self.fill_certificates_dict()
[1702]1054        if isinstance(name, str):
1055            name = (name,)
[1749]1056        reindextypes = {}
[1702]1057        reindex_special = []
1058        for n in name:
[3781]1059            if n in ("review_state",):
[1702]1060                reindex_special.append(n)
1061            else:
1062                for pt in self.affected_types.keys():
[1707]1063                    if n in self.affected_types[pt]['fields']:
[1702]1064                        if reindextypes.has_key(pt):
1065                            reindextypes[pt].append(n)
1066                        else:
1067                            reindextypes[pt]= [n]
1068                        break
[3781]1069        #cached_data = {}
1070        #if set(name).intersection(set(('faculty','department','end_level','mode'))):
1071        #    cached_data = self.fill_certificates_dict()
[1702]1072        students = self.portal_catalog(portal_type="Student")
[1954]1073        if hasattr(self,'portal_catalog_real'):
1074            aq_portal = self.portal_catalog_real.evalAdvancedQuery
1075        else:
1076            aq_portal = self.portal_catalog.evalAdvancedQuery
[1702]1077        num_objects = len(students)
1078        if pghandler:
1079            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1080        noattr = set(('StudentClearance','StudentPersonal')) & set(reindextypes.keys())
[2084]1081        #import pdb;pdb.set_trace()
[1702]1082        for i in xrange(num_objects):
1083            if pghandler: pghandler.report(i)
1084            student_brain = students[i]
[1707]1085            student_object = student_brain.getObject()
[1702]1086            data = {}
1087            modified = False
1088            sid = data['id'] = student_brain.getId
1089            if reindex_special and 'review_state' in reindex_special:
1090                modified = True
[3665]1091                data['review_state'] = student_object.portal_workflow.getInfoFor(student_object,'review_state',None)
[1707]1092            sub_objects = False
1093            for pt in reindextypes.keys():
[1702]1094                modified = True
[1707]1095                try:
1096                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1097                    sub_objects = True
1098                except:
1099                    continue
[2084]1100                for field in set(name).intersection(self.affected_types[pt]['fields']):
[1707]1101                    if hasattr(self,'get_from_doc_%s' % field):
[3781]1102                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc)
[1707]1103                    else:
1104                        data[field] = getattr(doc,field)
1105            if not sub_objects and noattr:
1106                import_res = self.returning_import(id = sid)
1107                if not import_res:
1108                    continue
1109                import_record = import_res[0]
1110                data['matric_no'] = import_record.matric_no
1111                data['sex'] = import_record.Sex == 'F'
1112                data['name'] = "%s %s %s" % (import_record.Firstname,
1113                                             import_record.Middlename,
1114                                             import_record.Lastname)
[1815]1115                data['jamb_reg_no'] = import_record.Entryregno
[1702]1116            if modified:
1117                self.modifyRecord(**data)
1118        if pghandler: pghandler.finish()
1119    ###)
[1620]1120
1121    def refreshCatalog(self, clear=0, pghandler=None): ###(
1122        """ re-index everything we can find """
1123        students_folder = self.portal_url.getPortalObject().campus.students
1124        if clear:
[1724]1125            self._catalog.clear()
[1700]1126        students = self.portal_catalog(portal_type="Student")
1127        num_objects = len(students)
[3781]1128        #cached_data = self.fill_certificates_dict()
1129        if not hasattr(self,'_v_certificates'):
1130            self._v_certificates = self.fill_certificates_dict()
[1620]1131        if pghandler:
1132            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1133        for i in xrange(num_objects):
1134            if pghandler: pghandler.report(i)
[1700]1135            student_brain = students[i]
1136            spath = student_brain.getPath()
[1727]1137            student_object = student_brain.getObject()
[1620]1138            data = {}
[1700]1139            sid = data['id'] = student_brain.getId
[3665]1140            #data['review_state'] = student_brain.review_state
1141            data['review_state'] = student_object.portal_workflow.getInfoFor(student_object,'review_state',None)
[1707]1142            sub_objects = False
1143            for pt in self.affected_types.keys():
1144                modified = True
1145                try:
1146                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1147                    sub_objects = True
1148                except:
[1727]1149                    #from pdb import set_trace;set_trace()
[1707]1150                    continue
1151                for field in self.affected_types[pt]['fields']:
1152                    if hasattr(self,'get_from_doc_%s' % field):
[2078]1153                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc)
[1707]1155                    else:
[1727]1156                        data[field] = getattr(doc,field,None)
1157            if not sub_objects:
[1700]1158                import_res = self.returning_import(id = sid)
1159                if not import_res:
[1620]1160                    continue
[1700]1161                import_record = import_res[0]
1162                data['matric_no'] = import_record.matric_no
1163                data['sex'] = import_record.Sex == 'F'
1164                data['name'] = "%s %s %s" % (import_record.Firstname,
1165                                             import_record.Middlename,
1166                                             import_record.Lastname)
[1815]1167                data['jamb_reg_no'] = import_record.Entryregno
[1700]1168            self.addRecord(**data)
[1620]1169        if pghandler: pghandler.finish()
1170    ###)
1171
[1700]1172    security.declarePrivate('notify_event_listener') ###(
[1620]1173    def notify_event_listener(self,event_type,object,infos):
1174        "listen for events"
[1716]1175        if not infos.has_key('rpath'):
1176            return
[1702]1177        pt = getattr(object,'portal_type',None)
1178        mt = getattr(object,'meta_type',None)
[1954]1179        students_catalog = self
[1702]1180        data = {}
1181        if pt == 'Student' and\
1182           mt == 'CPS Proxy Folder' and\
1183           event_type.startswith('workflow'):
1184            data['id'] = object.getId()
1185            data['review_state'] = self.portal_workflow.getInfoFor(object,'review_state',None)
1186            students_catalog.modifyRecord(**data)
1187            return
[1700]1188        rpl = infos['rpath'].split('/')
[2396]1189        if pt == 'Student' and mt == 'CPS Proxy Folder':
[1700]1190            student_id = object.id
[2396]1191            if event_type == "sys_add_object":
1192                try:
1193                    self.addRecord(id = student_id)
1194                except ValueError:
1195                    pass
1196                return
1197            elif event_type == 'sys_del_object':
1198                self.deleteRecord(student_id)
[1716]1199        if pt not in self.affected_types.keys():
[1700]1200            return
[1716]1201        if event_type not in ('sys_modify_object',):
1202            return
[1700]1203        if mt == 'CPS Proxy Folder':
1204            return
[3354]1205        if not hasattr(self,'_v_certificates'):
1206            self._v_certificates = self.fill_certificates_dict()
[1716]1207        for field in self.affected_types[pt]['fields']:
[1700]1208            if hasattr(self,'get_from_doc_%s' % field):
1209                data[field] = getattr(self,'get_from_doc_%s' % field)(object)
1210            else:
1211                data[field] = getattr(object,field)
1212        data['id'] = rpl[2]
[1716]1213        self.modifyRecord(**data)
[1700]1214    ###)
[1620]1215
[1625]1216
[971]1217InitializeClass(StudentsCatalog)
1218
[1146]1219###)
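
# Query sketch (illustration only): reading the metadata columns added in r5200
# back out of the students catalog.  'portal' is assumed to be the portal root,
# and 'session' is assumed here to be a catalog index; the '08' value is
# hypothetical.
def _example_query_students(portal):
    catalog = portal.students_catalog
    rows = []
    for brain in catalog.evalAdvancedQuery(Eq('session', '08')):
        # date_of_birth and marit_stat are metadata only (not indexes), so they
        # can be read from the brains but not searched on.
        rows.append((brain.id, brain.name, brain.date_of_birth, brain.marit_stat))
    return rows
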
1220
[3354]1221class CertificatesCatalog(WAeUPTable): ###(
1222    security = ClassSecurityInfo()
1223
1224    meta_type = 'WAeUP Certificates Catalog'
1225    name =  "certificates_catalog"
1226    key = "code"
1227    def __init__(self,name=None):
1228        if name ==  None:
1229            name =  self.name
1230        WAeUPTable.__init__(self, name)
1231
1232    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
1233        """ clear the catalog, then re-index everything """
1234
1235        elapse = time.time()
1236        c_elapse = time.clock()
1237
1238        pgthreshold = self._getProgressThreshold()
1239        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
1240        self.refreshCatalog(clear=1, pghandler=handler)
1241
1242        elapse = time.time() - elapse
1243        c_elapse = time.clock() - c_elapse
1244
1245        RESPONSE.redirect(
1246            URL1 +
1247            '/manage_catalogAdvanced?manage_tabs_message=' +
1248            urllib.quote('Catalog Updated \n'
1249                         'Total time: %s\n'
1250                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
1251    ###)
1252
1253    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1254        if isinstance(name, str):
1255            name = (name,)
1256        certificates = self.portal_catalog(portal_type="Certificate")
[3497]1257        num_objects = len(certificates)
[3354]1258        if pghandler:
1259            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1260        for i in xrange(num_objects):
1261            if pghandler: pghandler.report(i)
1262            certificate_brain = certificates[i]
1263            certificate_object = certificate_brain.getObject()
1264            pl = certificate_brain.getPath().split('/')
1265            data = {}
1266            cid = data[self.key] = certificate_brain.getId
1267            data['faculty'] = pl[-4]
1268            data['department'] = pl[-3]
1269            doc = certificate_object.getContent()
1270            for field in name:
1271                if field not in (self.key,'faculty','department'):
1272                    data[field] = getattr(doc,field)
1273            self.modifyRecord(**data)
1274        if pghandler: pghandler.finish()
1275    ###)
1276
1277    def refreshCatalog(self, clear=0, pghandler=None): ###(
1278        """ re-index everything we can find """
1279        if clear:
1280            self._catalog.clear()
1281        certificates = self.portal_catalog(portal_type="Certificate")
1282        num_objects = len(certificates)
1283        if pghandler:
1284            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1285        #from pdb import set_trace;set_trace()
1286        for i in xrange(num_objects):
1287            if pghandler: pghandler.report(i)
1288            certificate_brain = certificates[i]
1289            certificate_doc = certificate_brain.getObject().getContent()
1290            pl = certificate_brain.getPath().split('/')
1291            data = {}
1292            for field in self.schema():
1293                data[field] = getattr(certificate_doc,field,None)
1294            data[self.key] = certificate_brain.getId
1295            ai = pl.index('academics')
1296            data['faculty'] = pl[ai +1]
1297            data['department'] = pl[ai +2]
1298            if clear:
1299                self.addRecord(**data)
1300            else:
1301                self.modifyRecord(**data)
1302        if pghandler: pghandler.finish()
1303    ###)
1304
1305    security.declarePrivate('notify_event_listener') ###(
1306    def notify_event_listener(self,event_type,object,infos):
1307        "listen for events"
1308        if not infos.has_key('rpath'):
1309            return
1310        pt = getattr(object,'portal_type',None)
1311        mt = getattr(object,'meta_type',None)
1312        if pt != 'Certificate':
1313            return
1314        data = {}
1315        rpl = infos['rpath'].split('/')
1316        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
1317            return
1318        certificate_id = object.getId()
1319        data[self.key] = certificate_id
1320        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
1321            try:
1322                self.addRecord(**data)
1323            except ValueError:
1324                return
1325            certificate_id = object.getId()
1326            doc = object.getContent()
1327            if doc is None:
1328                return
1329            for field in self.schema():
1330                data[field] = getattr(doc,field,None)
1331            data[self.key] = certificate_id
1332            ai = rpl.index('academics')
1333            data['faculty'] = rpl[ai +1]
1334            data['department'] = rpl[ai +2]
1335            self.modifyRecord(**data)
1336            return
1337        if event_type == "sys_del_object":
1338            self.deleteRecord(certificate_id)
1339            return
1340        if event_type == "sys_modify_object" and mt == 'Certificate':
1341            #from pdb import set_trace;set_trace()
1342            for field in self.schema():
1343                data[field] = getattr(object,field,None)
1344            certificate_id = object.aq_parent.getId()
1345            data[self.key] = certificate_id
1346            ai = rpl.index('academics')
1347            data['faculty'] = rpl[ai +1]
1348            data['department'] = rpl[ai +2]
1349            self.modifyRecord(**data)
1350    ###)
1351
1352
1353InitializeClass(CertificatesCatalog)
1354###)
1355
[1146]1356class CoursesCatalog(WAeUPTable): ###(
[1716]1357    security = ClassSecurityInfo()
[1146]1358
1359    meta_type = 'WAeUP Courses Catalog'
[2094]1360    name =  "courses_catalog"
[1146]1361    key = "code"
[2094]1362    def __init__(self,name=None):
1363        if name ==  None:
1364            name =  self.name
1365        WAeUPTable.__init__(self, name)
[1146]1366
[1716]1367    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
1368        """ clear the catalog, then re-index everything """
[1146]1369
[1716]1370        elapse = time.time()
1371        c_elapse = time.clock()
1372
1373        pgthreshold = self._getProgressThreshold()
1374        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
1375        self.refreshCatalog(clear=1, pghandler=handler)
1376
1377        elapse = time.time() - elapse
1378        c_elapse = time.clock() - c_elapse
1379
1380        RESPONSE.redirect(
1381            URL1 +
1382            '/manage_catalogAdvanced?manage_tabs_message=' +
1383            urllib.quote('Catalog Updated \n'
1384                         'Total time: %s\n'
1385                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
1386    ###)
1387
1388    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1389        if isinstance(name, str):
1390            name = (name,)
1391        courses = self.portal_catalog(portal_type="Course")
1392        num_objects = len(courses)
1393        if pghandler:
1394            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1395        for i in xrange(num_objects):
1396            if pghandler: pghandler.report(i)
1397            course_brain = courses[i]
1398            course_object = course_brain.getObject()
1399            pl = course_brain.getPath().split('/')
1400            data = {}
1401            cid = data[self.key] = course_brain.getId
1402            data['faculty'] = pl[-4]
1403            data['department'] = pl[-3]
1404            doc = course_object.getContent()
1405            for field in name:
1406                if field not in (self.key,'faculty','department'):
1407                    data[field] = getattr(doc,field)
1408            self.modifyRecord(**data)
1409        if pghandler: pghandler.finish()
1410    ###)
1411
1412    def refreshCatalog(self, clear=0, pghandler=None): ###(
1413        """ re-index everything we can find """
[1724]1414        if clear:
1415            self._catalog.clear()
[1716]1416        courses = self.portal_catalog(portal_type="Course")
1417        num_objects = len(courses)
1418        if pghandler:
1419            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
[1724]1420        #from pdb import set_trace;set_trace()
[1716]1421        for i in xrange(num_objects):
1422            if pghandler: pghandler.report(i)
1423            course_brain = courses[i]
[1724]1424            course_doc = course_brain.getObject().getContent()
[1716]1425            pl = course_brain.getPath().split('/')
1426            data = {}
[1724]1427            for field in self.schema():
[1749]1428                data[field] = getattr(course_doc,field,None)
[1716]1429            data[self.key] = course_brain.getId
[1724]1430            ai = pl.index('academics')
1431            data['faculty'] = pl[ai +1]
1432            data['department'] = pl[ai +2]
1433            if clear:
1434                self.addRecord(**data)
1435            else:
1436                self.modifyRecord(**data)
[1716]1437        if pghandler: pghandler.finish()
1438    ###)
1439
1440    security.declarePrivate('notify_event_listener') ###(
1441    def notify_event_listener(self,event_type,object,infos):
1442        "listen for events"
1443        if not infos.has_key('rpath'):
1444            return
1445        pt = getattr(object,'portal_type',None)
1446        mt = getattr(object,'meta_type',None)
1447        if pt != 'Course':
1448            return
1449        data = {}
1450        rpl = infos['rpath'].split('/')
1451        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
1452            return
1453        course_id = object.getId()
1454        data[self.key] = course_id
[1724]1455        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
[1716]1456            try:
1457                self.addRecord(**data)
1458            except ValueError:
[1724]1459                return
1460            course_id = object.getId()
1461            doc = object.getContent()
1462            if doc is None:
1463                return
1464            for field in self.schema():
[1749]1465                data[field] = getattr(doc,field,None)
[1724]1466            data[self.key] = course_id
1467            ai = rpl.index('academics')
1468            data['faculty'] = rpl[ai +1]
1469            data['department'] = rpl[ai +2]
1470            self.modifyRecord(**data)
1471            return
[1716]1472        if event_type == "sys_del_object":
1473            self.deleteRecord(course_id)
[1724]1474            return
[1716]1475        if event_type == "sys_modify_object" and mt == 'Course':
[1724]1476            #from pdb import set_trace;set_trace()
[1716]1477            for field in self.schema():
[1749]1478                data[field] = getattr(object,field,None)
[1716]1479            course_id = object.aq_parent.getId()
1480            data[self.key] = course_id
[1724]1481            ai = rpl.index('academics')
1482            data['faculty'] = rpl[ai +1]
1483            data['department'] = rpl[ai +2]
[1716]1484            self.modifyRecord(**data)
1485    ###)
1486
1487
[1146]1488InitializeClass(CoursesCatalog)
[1151]1489###)
[1146]1490
[2084]1491class CourseResults(WAeUPTable): ###(
[2069]1492    security = ClassSecurityInfo()
1493
1494    meta_type = 'WAeUP Results Catalog'
1495    name = "course_results"
[2084]1496    key = "key" #student_id + level + course_id
[2094]1497    def __init__(self,name=None):
1498        if name ==  None:
1499            name = self.name
1500        WAeUPTable.__init__(self, name)
[2084]1501        self._queue = []
[2099]1502
[2094]1503    def addMultipleRecords(self, records): ###(
1504        """add many records"""
[3362]1505        existing_uids = []
[2094]1506        for data in records:
[3362]1507            uid = "%(student_id)s|%(level_id)s|%(course_id)s" % data
[2094]1508            data['%s' % self.key] = uid
[3362]1509            query = Eq(self.key, uid)
1510            res = self.course_results.evalAdvancedQuery(query)
[2094]1511            if len(res) > 0:
[3362]1512                rec = res[0]
1513                equal = True
1514                for attr in ('student_id','level_id','course_id'):
1515                    if getattr(rec,attr,'') != data[attr]:
1516                        equal = False
1517                        break
1518                if equal:
1519                    existing_uids.append(uid)
1520                    continue
[2094]1521            self.catalog_object(dict2ob(data), uid=uid)
[3362]1522        return existing_uids
[2094]1523    ###)
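    # Illustrative sketch (hypothetical usage, not from the original code): how a
    # batch of result rows might be handed to addMultipleRecords above. The
    # composite key is "<student_id>|<level_id>|<course_id>"; rows whose key and
    # identifying fields already match an existing record are skipped and their
    # uids returned.
    #
    #   records = [{'student_id': 'A123456', 'level_id': '200',
    #               'course_id': 'MTH101', 'score': 45}]   # hypothetical values
    #   skipped = context.course_results.addMultipleRecords(records)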
1524
[2434]1525    def deleteResultsHere(self,level_id,student_id): ###(
1526        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1527        course_results = self.course_results.evalAdvancedQuery(query)
[3362]1528        #import pdb;pdb.set_trace()
[2434]1529        for result in course_results:
1530            self.deleteRecord(result.key)
[2084]1531    ###)
1532
[2434]1533    def moveResultsHere(self,level,student_id): ###(
1534        #import pdb;pdb.set_trace()
1535        level_id = level.getId()
1536        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1537        course_results = self.course_results.evalAdvancedQuery(query)
[2437]1538        existing_courses = [cr.code for cr in course_results]
[2434]1539        to_delete = []
1540        for code,obj in level.objectItems():
[2437]1541            to_delete.append(code)
[2434]1542            carry_over = False
1543            if code.endswith('_co'):
1544                carry_over = True
[2437]1545                code  = code[:-3]
[2434]1546            if code in existing_courses:
[2094]1547                continue
[2434]1548            course_result_doc = obj.getContent()
[2094]1549            data = {}
[2434]1550            course_id = code
[2094]1551            for field in self.schema():
1552                data[field] = getattr(course_result_doc,field,'')
1553            data['key'] = key = "%(student_id)s|%(level_id)s|%(course_id)s" % vars()
[2099]1554            data['student_id'] = student_id
1555            data['level_id'] = level_id
[2439]1556            session_id = self.getLevelSession(level.getContent(),student_id,level_id)
[2442]1557            data['session_id'] = session_id
[2434]1558            #data['queue_status'] = OBJECT_CREATED
[2099]1559            data['code'] = course_id
[2434]1560            data['carry_over'] = carry_over
[2094]1561            self.catalog_object(dict2ob(data), uid=key)
[2434]1562        level.manage_delObjects(to_delete)
1563    ###)
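    # Descriptive note (not in the original source): moveResultsHere above moves
    # the course result objects of one study level into this flat catalog. An
    # object id ending in '_co' (e.g. 'MTH101_co', hypothetical id) marks a
    # carry-over course; the suffix is stripped before the code is catalogued and
    # carry_over is set to True. The contained level objects are deleted afterwards.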
1564
1565    def getCourses(self,student_id,level_id): ###(
[2094]1566        query = Eq('student_id',student_id) & Eq('level_id', level_id)
[2434]1567        course_results = self.course_results.evalAdvancedQuery(query)
1568        carry_overs = []
[2606]1569        normal1 = []
1570        normal2 = []
[2761]1571        normal3 = []
[2757]1572        total_credits = 0
1573        gpa = 0
[2434]1574        for brain in course_results:
1575            d = {}
[2781]1576
[2761]1577            for field in self.schema():
[3707]1578                d[field] = getattr(brain,field,None)
1579                if repr(d[field]) == 'Missing.Value':
1580                    d[field] = ''
[2761]1581            d['weight'] = ''
1582            d['grade'] = ''
1583            d['score'] = ''
1584
[2750]1585            if str(brain.credits).isdigit():
[2757]1586                credits = int(brain.credits)
1587                total_credits += credits
[2780]1588                score = getattr(brain,'score',0)
1589                if score and str(score).isdigit() and int(score) > 0:
1590                    score = int(score)
[3707]1591                    grade,weight = self.getGradesFromScore(score,'')
[2781]1592                    gpa += weight * credits
1593                    d['weight'] = weight
1594                    d['grade'] = grade
1595                    d['score'] = score
[3675]1596
[3707]1597            #if str(brain.ca1).isdigit() and str(brain.ca2).isdigit() and str(brain.exam).isdigit():
1598            #    d['score_calc'] = int(brain.ca1) + int(brain.ca2) + int(brain.exam)
1599            #else:
1600            #    d['score_calc'] = ''
1601            try:
1602                d['score_calc'] = float(brain.ca1) + float(brain.ca2) + float(brain.exam)
1603            except:
[3675]1604                d['score_calc'] = ''
1605
[3707]1606            if d['score_calc']:
1607                grade,weight = self.getGradesFromScore(d['score_calc'],level_id)
1608                d['grade'] = grade
1609
[2757]1610            d['coe'] = ''
[2434]1611            if brain.core_or_elective:
1612                d['coe'] = 'Core'
[2757]1613            elif brain.core_or_elective == False:
1614                d['coe'] = 'Elective'
[2434]1615            id = code = d['id'] = brain.code
1616            d['code'] = code
[2864]1617            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1618            if res:
1619                course = res[0]
1620                d['title'] = course.title
1621                # The courses_catalog stores both strings and integers in its semester field.
1622                # Reindexing the catalog might fix this; the Course schema declares it as 'CPS Int Field'.
1623                d['semester'] = str(course.semester)
1624            else:
[2866]1625                d['title'] = "Course has been removed from course list"
[2864]1626                d['semester'] = ''
[2448]1627            if brain.carry_over:
[2761]1628                d['coe'] = 'CO'
[2434]1629                carry_overs.append(d)
1630            else:
[2614]1631                if d['semester'] == '1':
[2606]1632                    normal1.append(d)
[2614]1633
1634                elif d['semester'] == '2':
[2606]1635                    normal2.append(d)
1636                else:
1637                    normal3.append(d)
1638        #normal.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
1639        #                                "%(semester)s%(code)s" % y))
[2503]1640        carry_overs.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
[2460]1641                                             "%(semester)s%(code)s" % y))
[2757]1642        return total_credits,gpa,carry_overs,normal1,normal2,normal3
[2094]1643    ###)
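    # Illustrative sketch (hypothetical usage, not from the original code):
    # consuming the tuple returned by getCourses, e.g. from a level view script.
    #
    #   total_credits,gpa,carry_overs,normal1,normal2,normal3 = \
    #       context.course_results.getCourses(student_id,level_id)
    #   # normal1/normal2/normal3 hold first-, second- and unknown-semester
    #   # results, carry_overs the repeated courses; gpa is returned as the raw
    #   # sum of weight*credits and is not yet divided by total_credits.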
1644
[4693]1645   
1646    # for transcript only
[3602]1647    def getAllCourses(self,student_id): ###(
1648        query = Eq('student_id',student_id)
1649        course_results = self.course_results.evalAdvancedQuery(query)
[3603]1650        courses = []
[3602]1651        for brain in course_results:
1652            d = {}
1653
1654            for field in self.schema():
1655                d[field] = getattr(brain,field,'')
1656
1657            d['weight'] = ''
1658            d['grade'] = ''
1659            d['score'] = ''
1660
1661            if str(brain.credits).isdigit():
1662                credits = int(brain.credits)
1663                score = getattr(brain,'score',0)
1664                if score and str(score).isdigit() and int(score) > 0:
1665                    score = int(score)
[4693]1666                    grade,weight = self.getGradesFromScore(score,'')
[3602]1667                    d['weight'] = weight
1668                    d['grade'] = grade
1669                    d['score'] = score
1670            d['coe'] = ''
1671            if brain.core_or_elective:
1672                d['coe'] = 'Core'
1673            elif brain.core_or_elective == False:
1674                d['coe'] = 'Elective'
1675            id = code = d['id'] = brain.code
1676            d['code'] = code
1677            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1678            if res:
1679                course = res[0]
1680                d['title'] = course.title
1681                # The courses_catalog stores both strings and integers in its semester field.
1682                # Reindexing the catalog might fix this; the Course schema declares it as 'CPS Int Field'.
1683                d['semester'] = str(course.semester)
1684            else:
1685                d['title'] = "Course has been removed from course list"
1686                d['semester'] = ''
1687            if brain.carry_over:
1688                d['coe'] = 'CO'
1689            courses.append(d)
1690        return courses
1691    ###)
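    # Illustrative sketch (hypothetical call, not from the original code): the
    # flat course list used for transcripts; unlike getCourses it returns no
    # totals and no per-semester grouping.
    #
    #   courses = context.course_results.getAllCourses('A123456')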
[3841]1692   
[4036]1693    def getYearGroupAverage(self,session_id,level_id): ###(
1694        query = Eq('session_id',session_id) & Eq('level_id',level_id)
1695        course_results = self.course_results.evalAdvancedQuery(query)
1696        yga1 = 0
[4302]1697        yg1 = []
[4036]1698        counter1 = 0
1699        yga2 = 0
[4302]1700        yg2 = []
[4036]1701        counter2 = 0
1702        yga3 = 0
[4302]1703        yg3 = []
[4036]1704        counter3 = 0       
1705        #import pdb;pdb.set_trace()
1706        for brain in course_results:
1707            try:
[4302]1708                om = float(brain.ca1) + float(brain.ca2) + float(brain.exam)
1709                if not om > 0:
[4036]1710                    continue
1711                code = brain.code               
1712                res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1713                if res:
1714                    course = res[0]
1715                    # The courses_catalog stores both strings and integers in its semester field.
1716                    # Reindexing the catalog might fix this; the Course schema declares it as 'CPS Int Field'.
1717                    semester = str(course.semester)
1718                else:
1719                    semester = ''
1720                if semester == '1':
1721                    counter1 += 1
[4302]1722                    yga1 += om
1723                    yg1.append(om)
[4036]1724                elif semester == '2':
1725                    counter2 += 1
[4302]1726                    yga2 += om     
1727                    yg2.append(om)   
[4036]1728                elif semester == '3':
1729                    counter3 += 1
[4302]1730                    yga3 += om
1731                    yg3.append(om)
[4036]1732            except:
1733                continue # skip results that cannot be evaluated (e.g. non-numeric scores)
1734        if counter1:
1735            yga1 /= counter1
1736            yga1 = '%.2f' % yga1   
1737        if counter2:
1738            yga2 /= counter2
1739            yga2 = '%.2f' % yga2   
1740        if counter3:
1741            yga3 /= counter3
1742            yga3 = '%.2f' % yga3                                   
[4302]1743        return yga1, yga2, yga3, counter1, counter2, counter3, yg1, yg2, yg3
[4036]1744    ###)
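    # Illustrative sketch (hypothetical values, not from the original code): the
    # per-semester year group averages returned by getYearGroupAverage.
    #
    #   yga1,yga2,yga3,n1,n2,n3,yg1,yg2,yg3 = \
    #       context.course_results.getYearGroupAverage('08','200')
    #   # yga1..yga3 are '%.2f' strings (or 0 if no results), n1..n3 the number
    #   # of contributing results, yg1..yg3 the raw lists of overall marks.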
1745   
[4302]1746   
1747    #security.declarePublic("calculateCoursePosition")
1748    def calculateCoursePosition(self,session_id,level_id,code,score,semester=None):
1749        #"""calculate Course Position"""
1750        query = Eq('session_id',session_id) & Eq('level_id',level_id) & Eq('code',code)
1751        course_results = self.course_results.evalAdvancedQuery(query)
1752        ygc = []
1753        #import pdb;pdb.set_trace() 
1754        for brain in course_results:
1755            try:
1756                if not float(brain.ca1) + float(brain.ca2) + float(brain.exam) > 0:
1757                    continue
1758                #code = brain.code   
1759                if semester:
1760                    res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1761                    if res:
1762                        course = res[0]
1763                        # The courses_catalog stores both strings and integers in its semester field.
1764                        # Reindexing the catalog might fix this; the Course schema declares it as 'CPS Int Field'.
1765                        semester_from_course = str(course.semester)
1766                    else:
1767                        continue
1768                    if semester != semester_from_course:
1769                        continue
1770                ygc.append(float(brain.ca1) + float(brain.ca2) + float(brain.exam))
1771            except:
1772                continue # skip results that cannot be evaluated (e.g. non-numeric scores)
1773        ygc.sort(reverse=True)
1774        if not len(ygc):
1775            return 'no result'
1776        #import pdb;pdb.set_trace()       
1777        for pos in range(len(ygc)):
1778            if ygc[pos] <= float(score):
1779                break
1780        output = {}   
1781        output['pos'] =  '%d of %d' % (pos+1,len(ygc))
1782        output['ygc'] = ygc
1783        return output
1784       
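    # Illustrative sketch (hypothetical values, not from the original code):
    # ranking one score within its course/session/level via calculateCoursePosition.
    #
    #   result = context.course_results.calculateCoursePosition(
    #                '08','200','MTH101',55,semester='1')
    #   # result['pos'] is e.g. '3 of 120', result['ygc'] the sorted list of
    #   # overall marks used for the ranking; the method returns the string
    #   # 'no result' when no gradable results are found.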
1785    security.declareProtected(ModifyPortalContent,"calculateAllCoursePositions")
1786    def calculateAllCoursePositions(self,session_id=None):
1787        """calculate All Course Positions"""
[4304]1788        logger = logging.getLogger('WAeUPTables.CourseResults.calculateAllCoursePositions')
[4306]1789        member = self.portal_membership.getAuthenticatedMember()
[4304]1790        logger.info('%s starts recalculation of positions in session %s' % (member,session_id))
[4302]1791        if session_id:
1792            query = Eq('session_id',session_id)
1793        else:
1794            return 'no session_id provided'
1795        course_results = self.course_results.evalAdvancedQuery(query)
1796        for brain in course_results:
1797            try:
1798                if not float(brain.ca1) + float(brain.ca2) + float(brain.exam) > 0:
[5146]1799                    data = {}
1800                    data[self.key] = brain.key
1801                    data['pic'] = ''
1802                    self.modifyRecord(**data)                   
[4302]1803                    continue
1804                res = self.courses_catalog.evalAdvancedQuery(Eq('code',brain.code))
1805                if res:
1806                    course = res[0]
1807                    semester_from_course = str(course.semester)
1808                else:
1809                    continue                   
1810                score = float(brain.ca1) + float(brain.ca2) + float(brain.exam)
[5143]1811                pic = self.calculateCoursePosition(session_id,brain.level_id,brain.code,score,semester_from_course)['pos']
[4302]1812                data = {}
1813                data[self.key] = brain.key
1814                data['pic'] = pic
1815                self.modifyRecord(**data)
1816            except:
[5146]1817                data = {}
1818                data[self.key] = brain.key
1819                data['pic'] = ''
1820                self.modifyRecord(**data)
[4304]1821                continue       
1822        logger.info('recalculation finished')             
[4302]1823        return 'ready'   
1824   
[3988]1825    def exportRemoveAllCourses(self,student_id,export=False,remove=False): ###(
[3841]1826        "" # exports and/or removes all course results of a student
1827        query = Eq('student_id',student_id)
1828        cr_catalog = self.course_results
1829        course_results = cr_catalog.evalAdvancedQuery(query)
1830        courses = []
1831        fields = self.schema()
1832        format = '"%(' + ')s","%('.join(fields) + ')s"'
1833        for brain in course_results:
1834            d = {}
1835            for field in fields:
1836                d[field] = getattr(brain,field,'')
1837            courses.append(format % d)
1838               
1839        if export:
1840            export_file = "%s/export/course_results_removed.csv" % (i_home)
[3989]1841            if not os.path.exists(export_file): 
1842                file_handler = open(export_file,"a")
1843                headline = ','.join(fields)
1844                file_handler.write(headline +'\n')
1845            else:
1846                file_handler = open(export_file,"a")
[3841]1847            for line in courses:
1848                file_handler.write(line +'\n')
[3602]1849
[3841]1850        if remove:
1851            for brain in course_results:
1852                key = getattr(brain,'key','')
1853                cr_catalog.deleteRecord(key)
1854       
1855        return courses
1856    ###)   
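    # Illustrative sketch (hypothetical call, not from the original code): export
    # a student's course results to <instance>/export/course_results_removed.csv
    # and delete the corresponding catalog records. Values are written
    # double-quoted but not otherwise CSV-escaped, so embedded quotes or commas
    # would break the line.
    #
    #   context.course_results.exportRemoveAllCourses('A123456',
    #                                                 export=True,remove=True)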
[3984]1857   
1858   
[3841]1859
[3602]1860InitializeClass(CourseResults)
1861###)
1862
[1625]1863class OnlinePaymentsImport(WAeUPTable): ###(
[1620]1864
1865    meta_type = 'WAeUP Online Payment Transactions'
[1625]1866    name = "online_payments_import"
[1620]1867    key = "order_id"
[2094]1868    def __init__(self,name=None):
1869        if name ==  None:
1870            name = self.name
1871        WAeUPTable.__init__(self, name)
[1620]1872
1873
[2069]1874InitializeClass(OnlinePaymentsImport)
[1620]1875###)
1876
[1151]1877class ReturningImport(WAeUPTable): ###(
[1146]1878
[1151]1879    meta_type = 'Returning Import Table'
1880    name = "returning_import"
[1146]1881    key = "matric_no"
[2094]1882    def __init__(self,name=None):
1883        if name ==  None:
1884            name = self.name
1885        WAeUPTable.__init__(self, name)
[1146]1886
1887
[1151]1888InitializeClass(ReturningImport)
1889###)
[1146]1890
1891class ResultsImport(WAeUPTable): ###(
1892
1893    meta_type = 'Results Import Table'
1894    name = "results_import"
1895    key = "key"
[2094]1896    def __init__(self,name=None):
1897        if name ==  None:
1898            name = self.name
1899        WAeUPTable.__init__(self, name)
[1146]1900
1901
1902InitializeClass(ResultsImport)
1903
1904###)
1905
1906class PaymentsCatalog(WAeUPTable): ###(
[2738]1907    security = ClassSecurityInfo()
[1146]1908
1909    meta_type = 'WAeUP Payments Catalog'
[2868]1910    name = "payments_catalog"
1911    key = "order_id"
[2094]1912    def __init__(self,name=None):
1913        if name ==  None:
1914            name = self.name
1915        WAeUPTable.__init__(self, name)
[1146]1916
[2859]1917
[2738]1918    security.declarePrivate('notify_event_listener') ###(
1919    def notify_event_listener(self,event_type,object,infos):
1920        "listen for events"
1921        if not infos.has_key('rpath'):
1922            return
1923        pt = getattr(object,'portal_type',None)
1924        mt = getattr(object,'meta_type',None)
1925        data = {}
[2904]1926        if pt != 'Payment':
1927            return
[3469]1928        if event_type == 'sys_del_object' and mt == 'CPS Proxy Folder':
1929            self.deleteRecord(object.getContent().order_id)
[2904]1930        if mt == 'CPS Proxy Folder':
[2911]1931            return # is handled only for the real object
[2738]1932        if event_type not in ('sys_modify_object',):
1933            return
1934        for field in self.schema():
[2859]1935            data[field] = getattr(object,field,'')
[2738]1936        rpl = infos['rpath'].split('/')
[2904]1937        #import pdb;pdb.set_trace()
1938        student_id = rpl[-4]
[2738]1939        data['student_id'] = student_id
[2907]1940        modified = False
[2859]1941        try:
1942            self.modifyRecord(**data)
[2907]1943            modified = True
[2859]1944        except KeyError:
[2926]1945            #logger = logging.getLogger('WAeUPTables.PaymentsCatalog.%s' % self.__name__)
1946            #logger.info("could not modify entry for %(student_id)s with %(order_id)s" % data)
1947            pass
[2907]1948        if not modified:
1949            try:
1950                self.addRecord(**data)
1951            except:
[2976]1952                logger = logging.getLogger('WAeUPTables.PaymentsCatalog.notify_event_listener')
[2907]1953                logger.info("could not add or modify entry for %(student_id)s with %(order_id)s" % data)
1954        ###)
[1146]1955
[2738]1956
[3988]1957    def exportRemoveAllPayments(self,student_id,export=False,remove=False): ###(
[3984]1958        "" # exports and/or removes all payments of a student
1959        query = Eq('student_id',student_id)
1960        pm_catalog = self.payments_catalog
1961        payments = pm_catalog.evalAdvancedQuery(query)
1962        payments_dic = []
1963        fields = self.schema()
1964        format = '"%(' + ')s","%('.join(fields) + ')s"'
1965        for brain in payments:
1966            d = {}
1967            for field in fields:
1968                d[field] = getattr(brain,field,'')
1969            payments_dic.append(format % d)
1970               
1971        if export:
1972            export_file = "%s/export/payments_removed.csv" % (i_home)
[3989]1973            if not os.path.exists(export_file): 
1974                file_handler = open(export_file,"a")
1975                headline = ','.join(fields)
1976                file_handler.write(headline +'\n')
1977            else:
1978                file_handler = open(export_file,"a")
[3984]1979            for line in payments_dic:
1980                file_handler.write(line +'\n')
1981
1982        if remove:
1983            for brain in payments:
1984                order_id = getattr(brain,'order_id','')
1985                pm_catalog.deleteRecord(order_id)
1986       
1987        return payments_dic
1988    ###)   
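    # Illustrative sketch (hypothetical call, not from the original code): archive
    # a student's payments to <instance>/export/payments_removed.csv and delete
    # the catalog records in one step.
    #
    #   context.payments_catalog.exportRemoveAllPayments('A123456',
    #                                                    export=True,remove=True)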
1989
[1146]1990InitializeClass(PaymentsCatalog)
1991
1992###)
1993
[4302]1994class RemovedStudentIds(WAeUPTable): ###(
1995
1996    meta_type = 'WAeUP Removed StudentIds'
1997    name = "removed_student_ids"
1998    key = "id"
1999    def __init__(self,name=None):
2000        if name ==  None:
2001            name = self.name
2002        WAeUPTable.__init__(self, name)
2003
2004
2005InitializeClass(RemovedStudentIds)
2006
2007###)
2008
[414]2009# BBB:
2010AccomodationTable = AccommodationTable