source: WAeUP_SRP/base/WAeUPTables.py @ 2480

Last change on this file since 2480 was 2460, checked in by joachim, 17 years ago

fix refreshlevel + add_course_result

  • Property svn:keywords set to Id
File size: 53.9 KB
RevLine 
[966]1#-*- mode: python; mode: fold -*-
[363]2# (C) Copyright 2005 AixtraWare <http://aixtraware.de>
3# Author: Joachim Schmitz <js@aixtraware.de>
4#
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License version 2 as published
7# by the Free Software Foundation.
8#
9# This program is distributed in the hope that it will be useful,
10# but WITHOUT ANY WARRANTY; without even the implied warranty of
11# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12# GNU General Public License for more details.
13#
14# You should have received a copy of the GNU General Public License
15# along with this program; if not, write to the Free Software
16# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
17# 02111-1307, USA.
18#
19# $Id: WAeUPTables.py 2460 2007-10-28 14:46:29Z joachim $
20
21from zope.interface import implements
22from Globals import InitializeClass
23from Products.ZCatalog.ZCatalog import ZCatalog
[1620]24from Products.ZCatalog.ProgressHandler import ZLogHandler
[780]25from AccessControl import ClassSecurityInfo
26from Products.CMFCore.permissions import ModifyPortalContent
[2094]27from Products.CMFCore.utils import getToolByName
28from Products.CMFCore.CatalogTool import CatalogTool
29from Products.CPSSchemas.StorageAdapter import MappingStorageAdapter
30from Products.CPSSchemas.DataStructure import DataStructure
31from Products.CPSSchemas.DataModel import DataModel
32from Products.AdvancedQuery import Eq, Between, Le,In
[1700]33import urllib
[1620]34import DateTime,time
[780]35import csv,re
36import logging
37import Globals
38p_home = Globals.package_home(globals())
39i_home = Globals.INSTANCE_HOME
40
# Workflow-state marker strings shared with the rest of the product.
# NOTE(review): "sheduled" is a misspelling of "scheduled", but the value is
# compared/persisted elsewhere, so it must not be corrected without a
# coordinated data migration.
ADDING_SHEDULED = "adding_sheduled"
OBJECT_CREATED = "object_created"
43
[363]44from interfaces import IWAeUPTable
45
class AttributeHolder(object):
    """Empty container object; attributes are attached dynamically."""
    pass

def dict2ob(dict):
    """Return an AttributeHolder whose attributes mirror *dict*'s items."""
    ob = AttributeHolder()
    # Plain instance: updating __dict__ is equivalent to setattr per key.
    ob.__dict__.update(dict)
    return ob
54
class WAeUPTable(ZCatalog): ###(
    """Common base for the WAeUP catalog tools (thin ZCatalog wrappers)."""

    implements(IWAeUPTable)
    security = ClassSecurityInfo()
    meta_type = None  # subclasses must override

    def __init__(self, name=None):
        # Fall back to the subclass-level default name.
        # BUGFIX(idiom): compare to None with `is`, not `==`.
        if name is None:
            name = self.name
        ZCatalog.__init__(self, name)
[2099]65
[2094]66    def refreshCatalog(self, clear=0, pghandler=None): ###(
[1620]67        """ don't refresh for a normal table """
68
69        if self.REQUEST and self.REQUEST.RESPONSE:
70            self.REQUEST.RESPONSE.redirect(
71              URL1 +
72              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20refresh%20not%20implemented')
73
[2094]74###)
75
76    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None): ###(
[1620]77        """ clears the whole enchilada """
[1986]78
[1916]79        #if REQUEST and RESPONSE:
80        #    RESPONSE.redirect(
81        #      URL1 +
82        #      '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Clearing%20disabled')
[1620]83
[1916]84        self._catalog.clear()
[1620]85        if REQUEST and RESPONSE:
86            RESPONSE.redirect(
87              URL1 +
[1916]88              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20cleared')
[1620]89
[2094]90###)
91
[2189]92    def record2dict(self,fields,record):
93        d = {}
94        for key in fields:
95            v = getattr(record, key, None)
[2192]96            if key == 'sex':
97                if v:
98                    v = 'F'
99                else:
100                    v = 'M'
101                d[key] = v
102            elif v:
[2189]103                if key == 'lga':
104                    v = self.portal_vocabularies.local_gov_areas.get(v)
105                d[key] = v
106            else:
107                d[key] = ''
108        return d
[2191]109
[2094]110    def addRecord(self, **data): ###(
[502]111        # The uid is the same as "bed".
112        uid = data[self.key]
113        res = self.searchResults({"%s" % self.key : uid})
114        if len(res) > 0:
115            raise ValueError("More than one record with uid %s" % uid)
116        self.catalog_object(dict2ob(data), uid=uid)
117        return uid
[834]118
[2094]119###)
120
    def deleteRecord(self, uid):
        # Remove the record identified by uid from the catalog.
        self.uncatalog_object(uid)
[834]123
[502]124    def searchAndSetRecord(self, **data):
125        raise NotImplemented
126
[2094]127    def modifyRecord(self, record=None, **data): ###(
[502]128        #records = self.searchResults(uid=uid)
129        uid = data[self.key]
[2069]130        if record is None:
131            records = self.searchResults({"%s" % self.key : uid})
132            if len(records) > 1:
133                # Can not happen, but anyway...
134                raise ValueError("More than one record with uid %s" % uid)
135            if len(records) == 0:
136                raise KeyError("No record for uid %s" % uid)
137            record = records[0]
[363]138        record_data = {}
139        for field in self.schema() + self.indexes():
140            record_data[field] = getattr(record, field)
141        # Add the updated data:
142        record_data.update(data)
143        self.catalog_object(dict2ob(record_data), uid)
144
[2094]145###)
146
147    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
[1062]148        if isinstance(name, str):
[2094]149            name =  (name,)
[1062]150        paths = self._catalog.uids.items()
151        i = 0
152        #import pdb;pdb.set_trace()
153        for p,rid in paths:
154            i += 1
155            metadata = self.getMetadataForRID(rid)
156            record_data = {}
157            for field in name:
158                record_data[field] = metadata.get(field)
159            uid = metadata.get(self.key)
160            self.catalog_object(dict2ob(record_data), uid, idxs=name,
161                                update_metadata=0)
[1082]162
[2094]163###)
164
165    security.declareProtected(ModifyPortalContent,"exportAllRecords") ###(
[780]166    def exportAllRecords(self):
167        "export a WAeUPTable"
168        #import pdb;pdb.set_trace()
169        fields = [field for field in self.schema()]
170        format = ','.join(['"%%(%s)s"' % fn for fn in fields])
171        csv = []
172        csv.append(','.join(['"%s"' % fn for fn in fields]))
173        for uid in self._catalog.uids:
174            records = self.searchResults({"%s" % self.key : uid})
175            if len(records) > 1:
176                # Can not happen, but anyway...
177                raise ValueError("More than one record with uid %s" % uid)
178            if len(records) == 0:
179                raise KeyError("No record for uid %s" % uid)
180            rec = records[0]
181            csv.append(format % rec)
182        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
183        open("%s/import/%s-%s.csv" % (i_home,self.getId(),current),"w+").write('\n'.join(csv))
[2094]184
185###)
186
    security.declareProtected(ModifyPortalContent,"dumpAll")###(
    def dumpAll(self):
        """dump all data in the table to a csv"""
        # Export every record to <instance>/export/<name>_<timestamp>.csv,
        # in chunks of 2000 rows, logging throughput and an ETA, then gzip
        # the result and redirect back to the caller's page.
        member = self.portal_membership.getAuthenticatedMember()
        logger = logging.getLogger('WAeUPTables.dump_%s' % self.__name__)
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
        # NOTE(review): leftover debug output (Python 2 print statement).
        print export_file
        # NOTE(review): res_list is never used below.
        res_list = []
        lines = []
        # Column set: explicit export_keys if the subclass defines one,
        # otherwise the full catalog schema.
        if hasattr(self,"export_keys"):
            fields = self.export_keys
        else:
            fields = []
            for f in self.schema():
                fields.append(f)
        headline = ','.join(fields)
        #open(export_file,"a").write(headline +'\n')
        # Write the header first, then reopen in append mode for the rows.
        out = open(export_file,"wb")
        out.write(headline +'\n')
        out.close()
        out = open(export_file,"a")
        csv_writer = csv.DictWriter(out,fields,)
        # NOTE(review): `format` is built but no longer used (superseded by
        # DictWriter); kept for byte-compatibility.
        format = '"%(' + ')s","%('.join(fields) + ')s"'
        records = self()
        nr2export = len(records)
        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
        chunk = 2000
        total = 0
        start = DateTime.DateTime().timeTime()
        start_chunk = DateTime.DateTime().timeTime()
        for record in records:
            # NOTE(review): not_all is assigned but never read.
            not_all = False
            d = self.record2dict(fields,record)
            #d['state'],d['lga'] = formatLGA(d['lga'],voc = self.portal_vocabularies.local_gov_areas)
            #lines.append(format % d)
            lines.append(d)
            total += 1
            # Flush every `chunk` rows, and once more on the final row.
            if total and not total % chunk or total == len(records):
                #open(export_file,"a").write('\n'.join(lines) +'\n')
                csv_writer.writerows(lines)
                anz = len(lines)
                logger.info("wrote %(anz)d  total written %(total)d" % vars())
                # Per-chunk timing plus an estimated completion time based
                # on the running average per record.
                end_chunk = DateTime.DateTime().timeTime()
                duration = end_chunk-start_chunk
                per_record = duration/anz
                till_now = end_chunk - start
                avarage_per_record = till_now/total
                estimated_end = DateTime.DateTime(start + avarage_per_record * nr2export)
                estimated_end = estimated_end.strftime("%H:%M:%S")
                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
                start_chunk = DateTime.DateTime().timeTime()
                lines = []
        end = DateTime.DateTime().timeTime()
        logger.info('total time %6.2f m' % ((end-start)/60))
        import os
        filename, extension = os.path.splitext(export_file)
        from subprocess import call
        msg = "wrote %(total)d records to %(export_file)s" % vars()
        # Best effort: compress the export; a failure only downgrades the
        # status message, it does not abort the dump.
        try:
            retcode = call('gzip %s' % (export_file),shell=True)
            if retcode == 0:
                msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
        except OSError, e:
            retcode = -99
            logger.info("zip failed with %s" % e)
        logger.info(msg)
        args = {'portal_status_message': msg}
        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
        url = self.REQUEST.get('URL2')
        return self.REQUEST.RESPONSE.redirect(url)
    ###)
259
    security.declarePrivate("_import_old") ###(
    def _import_old(self,filename,schema,layout, mode,logger):
        "import data from csv"
        # Validate <instance>/import/<filename>.csv row by row against the
        # CPS layout's widget validators.  Valid rows are appended to
        # <filename>_imported<ts>.csv, invalid ones (with an Error column)
        # to <filename>_not_imported<ts>.csv.  Returns a result dict with
        # counters, the DataModels of valid/invalid rows and the filenames.
        # NOTE(review): transaction/random are imported but unused here.
        import transaction
        import random
        pm = self.portal_membership
        member = pm.getAuthenticatedMember()
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        import_fn = "%s/import/%s.csv" % (i_home,filename)
        imported_fn = "%s/import/%s_imported%s.csv" % (i_home,filename,current)
        not_imported_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
        start = True
        tr_count = 1
        total_imported = 0
        total_not_imported = 0
        total = 0
        # NOTE(review): iname is never used after this assignment.
        iname =  "%s" % filename
        not_imported = []
        imported = []
        valid_records = []
        invalid_records = []
        # Pre-populate the result dict so every early return below still
        # hands back a consistent structure.
        d = {}
        d['mode'] = mode
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        d['import_fn'] = import_fn
        d['imported_fn'] = imported_fn
        d['not_imported_fn'] = not_imported_fn
        if schema is None:
            em = 'No schema specified'
            logger.error(em)
            return d
        if layout is None:
            em = 'No layout specified'
            logger.error(em)
            return d
        # One validator callable per layout widget.
        validators = {}
        for widget in layout.keys():
            try:
                validators[widget] = layout[widget].validate
            except AttributeError:
                logger.info('%s has no validate attribute' % widget)
                return d
        # if mode == 'edit':
        #     importer = self.importEdit
        # elif mode == 'add':
        #     importer = self.importAdd
        # else:
        #     importer = None
        try:
            items = csv.DictReader(open(import_fn,"rb"),
                                   dialect="excel",
                                   skipinitialspace=True)
        except:
            em = 'Error reading %s.csv' % filename
            logger.error(em)
            return d
        #import pdb;pdb.set_trace()
        for item in items:
            # First-row setup: read the header to learn the column names,
            # filter out ignorable/UPPERCASE columns, check them against
            # the layout, and write header lines to both output files.
            if start:
                start = False
                logger.info('%s starts import from %s.csv' % (member,filename))
                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
                attrs = csv.reader(open("%s/import/%s.csv" % (i_home,filename),"rb"),
                                   dialect="excel",
                                   skipinitialspace=True).next()
                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
                diff2schema = set(import_keys).difference(set(schema.keys()))
                diff2layout = set(import_keys).difference(set(layout.keys()))
                if diff2layout:
                    em = "not ignorable key(s) %s found in heading" % diff2layout
                    logger.info(em)
                    return d
                s = ','.join(['"%s"' % fn for fn in import_keys])
                open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
                #s = '"id",' + s
                open(imported_fn,"a").write(s + '\n')
                format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
                format_error = format + ',"%(Error)s"'
                #format = '"%(id)s",'+ format
                adapters = [MappingStorageAdapter(schema, item)]
            # Run every column through its widget validator and collect
            # all error messages for this row.
            dm = DataModel(item, adapters,context=self)
            ds = DataStructure(data=item,datamodel=dm)
            error_string = ""
            for k in import_keys:
                if not validators[k](ds,mode=mode):
                    error_string += " %s : %s" % (k,ds.getError(k))
            # if not error_string and importer:
            #     item.update(dm)
            #     item['id'],error = importer(item)
            #     if error:
            #         error_string += error
            if error_string:
                item['Error'] = error_string
                invalid_records.append(dm)
                not_imported.append(format_error % item)
                total_not_imported += 1
            else:
                em = format % item
                valid_records.append(dm)
                imported.append(em)
                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
                tr_count += 1
                total_imported += 1
            total += 1
        if len(imported) > 0:
            open(imported_fn,"a").write('\n'.join(imported))
        if len(not_imported) > 0:
            open(not_imported_fn,"a").write('\n'.join(not_imported))
        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        d['imported_fn'] = imported_fn
        d['not_imported_fn'] = not_imported_fn
        #logger.info(em)
        return d
    ###)
[2185]381
382    security.declarePrivate("_import") ###(
383    def _import_new(self,csv_items,schema, layout, mode,logger):
384        "import data from csv.Dictreader Instance"
385        start = True
386        tr_count = 1
387        total_imported = 0
388        total_not_imported = 0
389        total = 0
390        iname =  "%s" % filename
391        not_imported = []
392        valid_records = []
393        invalid_records = []
394        duplicate_records = []
395        d = {}
396        d['mode'] = mode
397        d['valid_records'] = valid_records
398        d['invalid_records'] = invalid_records
399        d['invalid_records'] = duplicate_records
400        # d['import_fn'] = import_fn
401        # d['imported_fn'] = imported_fn
402        # d['not_imported_fn'] = not_imported_fn
403        validators = {}
404        for widget in layout.keys():
405            try:
406                validators[widget] = layout[widget].validate
407            except AttributeError:
408                logger.info('%s has no validate attribute' % widget)
409                return d
410        for item in csv_items:
411            if start:
412                start = False
413                logger.info('%s starts import from %s.csv' % (member,filename))
414                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
415                attrs = csv.reader(open("%s/import/%s.csv" % (i_home,filename),"rb")).next()
416                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
417                diff2schema = set(import_keys).difference(set(schema.keys()))
418                diff2layout = set(import_keys).difference(set(layout.keys()))
419                if diff2layout:
420                    em = "not ignorable key(s) %s found in heading" % diff2layout
421                    logger.info(em)
422                    return d
423                # s = ','.join(['"%s"' % fn for fn in import_keys])
424                # open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
425                # #s = '"id",' + s
426                # open(imported_fn,"a").write(s + '\n')
427                # format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
428                # format_error = format + ',"%(Error)s"'
429                # #format = '"%(id)s",'+ format
430                adapters = [MappingStorageAdapter(schema, item)]
431            dm = DataModel(item, adapters,context=self)
432            ds = DataStructure(data=item,datamodel=dm)
433            error_string = ""
434            for k in import_keys:
435                if not validators[k](ds,mode=mode):
436                    error_string += " %s : %s" % (k,ds.getError(k))
437            if error_string:
438                item['Error'] = error_string
439                #invalid_records.append(dm)
440                invalid_records.append(item)
441                total_not_imported += 1
442            else:
443                em = format % item
444                valid_records.append(dm)
445                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
446                tr_count += 1
447                total_imported += 1
448            total += 1
449        # if len(imported) > 0:
450        #     open(imported_fn,"a").write('\n'.join(imported))
451        # if len(not_imported) > 0:
452        #     open(not_imported_fn,"a").write('\n'.join(not_imported))
453        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
454        d['imported'] = total_imported
455        d['not_imported'] = total_not_imported
456        d['valid_records'] = valid_records
457        d['invalid_records'] = invalid_records
458        return d
459    ###)
460
    security.declarePublic("missingValue")###(
    def missingValue(self):
        # Expose the ZCatalog Missing.Value singleton (used to represent
        # absent metadata) to page templates/scripts.
        from Missing import MV
        return MV
    ###)
###)
[834]467
class AccommodationTable(WAeUPTable): ###(
    """Catalog of hostel beds; records are keyed by bed id."""

    meta_type = 'WAeUP Accommodation Tool'
    name = "portal_accommodation"
    key = "bed"

    def __init__(self,name=None):
        # BUGFIX(idiom): compare to None with `is`, not `==`.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)

    def searchAndReserveBed(self, student_id,bed_type):
        """Reserve a free bed of `bed_type` for `student_id`.

        Returns (1, bed) on success, (-1, message) if the student already
        holds a bed, and (-2, message) when no bed of that type is free.
        """
        records = self.searchResults({'student' : student_id})
        if len(records) > 0:
            return -1,"Student with Id %s already booked bed %s." % (student_id,records[0].bed)

        # Only beds without an assigned student are candidates.
        records = [r for r in self.searchResults({'bed_type' : bed_type}) if not r.student]
        if len(records) == 0:
            return -2,"No bed available"
        rec = records[0]
        self.modifyRecord(bed=rec.bed,student=student_id)
        s_logger = logging.getLogger('WAeUPTables.AccommodationTable.searchAndReserveBed')
        s_logger.info('%s reserved bed %s' % (student_id,rec.bed))
        return 1,rec.bed


InitializeClass(AccommodationTable)

###)
497
class PinTable(WAeUPTable): ###(
    """Catalog of activation/payment PINs, keyed by the pin itself."""
    from ZODB.POSException import ConflictError
    meta_type = 'WAeUP Pin Tool'
    name = "portal_pins"
    key = 'pin'

    def __init__(self,name=None):
        # BUGFIX(idiom): compare to None with `is`, not `==`.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)

    def searchAndSetRecord(self, uid, student_id,prefix):
        """Bind pin `uid` to `student_id`.

        Return codes:
           2: pin already used by this very student (or ZODB conflict)
           1: pin successfully bound
           0: pin already used by a different student
          -1: unknown pin
          -2: student already used another pin of this batch (CLR/APP only)
        """

        # The following line must be activated after resetting the
        # the portal_pins table. This is to avoid duplicate entries
        # and disable duplicate payments.

        #student_id = student_id.upper()

        # CLR/APP pins are one-per-student-per-batch: reject if the student
        # already consumed a different pin of the same prefix.
        records = self.searchResults(student = student_id)
        if len(records) > 0 and prefix in ('CLR','APP'):
            for r in records:
                if r.pin != uid and r.prefix_batch.startswith(prefix):
                    return -2
        records = self.searchResults({"%s" % self.key : uid})
        if len(records) > 1:
            # Can not happen, but anyway...
            raise ValueError("More than one record with uid %s" % uid)
        if len(records) == 0:
            return -1
        record = records[0]
        if record.student == "":
            # Unused pin: copy the stored record, attach the student and
            # write it back.
            record_data = {}
            for field in self.schema() + self.indexes():
                record_data[field] = getattr(record, field)
            record_data['student'] = student_id
            try:
                self.catalog_object(dict2ob(record_data), uid)
                return 1
            except ConflictError:
                return 2
        if record.student.upper() != student_id.upper():
            return 0
        if record.student.upper() == student_id.upper():
            return 2
        # NOTE(review): unreachable -- the two comparisons above are
        # exhaustive; kept as a defensive fallback.
        return -3
InitializeClass(PinTable)
###)
[966]547
class PumeResultsTable(WAeUPTable): ###(
    """Catalog of PUME examination results, keyed by JAMB reg number."""

    meta_type = 'WAeUP PumeResults Tool'
    name = "portal_pumeresults"
    key = "jamb_reg_no"

    def __init__(self,name=None):
        # BUGFIX(idiom): compare to None with `is`, not `==`.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(PumeResultsTable)

###)
[971]562
class ApplicantsCatalog(WAeUPTable): ###(
    """Catalog of applicants, keyed by JAMB registration number."""

    meta_type = 'WAeUP Applicants Catalog'
    name = "applicants_catalog"
    key = "reg_no"
    security = ClassSecurityInfo()
    # Column set (and order) used by dumpAll for csv exports.
    export_keys = (
                   "reg_no",
                   "status",
                   "lastname",
                   "sex",
                   "date_of_birth",
                   "lga",
                   "email",
                   "phone",
                   "passport",
                   "entry_mode",
                   "pin",
                   "screening_type",
                   "registration_date",
                   "testdate",
                   "application_date",
                   "screening_date",
                   "faculty",
                   "department",
                   "course1",
                   "course2",
                   "course3",
                   "eng_score",
                   "subj1",
                   "subj1score",
                   "subj2",
                   "subj2score",
                   "subj3",
                   "subj3score",
                   "aggregate",
                   "course_admitted",
                   )

    def __init__(self,name=None):
        # BUGFIX(idiom): compare to None with `is`, not `==`.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)
605
[2189]606
[2191]607
[2185]608    security.declareProtected(ModifyPortalContent,"new_importCSV")###(
609    def new_importCSV(self,filename="JAMB_data",
610                  schema_id="application",
611                  layout_id="application_import",
612                  mode='add'):
613        """ import JAMB data """
614        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
615        pm = self.portal_membership
616        member = pm.getAuthenticatedMember()
617        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
618        lock_fn = "%s/import/%s_import_lock" % (i_home,filename)
619        import_fn = "%s/import/%s.csv" % (i_home,filename)
620        if mode not in ('add','edit'):
621            logger.info("invalid mode: %s" % mode)
622        if os.path.exists(lock_fn):
623            logger.info("import of %(import_fn)s already in progress" % vars())
624            return
625        lock_file = open(lock_fn,"w")
626        lock_file.write("%(current)s \n" % vars())
627        lock_file.close()
628        invalid_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
629        duplicate_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
630        stool = getToolByName(self, 'portal_schemas')
631        ltool = getToolByName(self, 'portal_layouts')
632        schema = stool._getOb(schema_id)
633        if schema is None:
634            em = 'No such schema %s' % schema_id
635            logger.error(em)
636            return
637        for postfix in ('_import',''):
638            layout_name = "%(layout_id)s%(postfix)s" % vars()
639            if hasattr(ltool,layout_name):
640                break
641        layout = ltool._getOb(layout_name)
642        if layout is None:
643            em = 'No such layout %s' % layout_id
644            logger.error(em)
645            return
646        try:
647            csv_file = csv.DictReader(open(import_fn,"rb"))
648        except:
649            em = 'Error reading %s.csv' % filename
650            logger.error(em)
[2191]651            return
[2185]652        d = self._import_new(csv_items,schema,layout,mode,logger)
653        imported = []
654        edited = []
655        duplicates = []
656        not_found = []
657        if len(d['valid_records']) > 0:
658            for record in d['valid_records']:
659                #import pdb;pdb.set_trace()
660                if mode == "add":
661                    try:
662                        self.addRecord(**dict(record.items()))
663                        imported.append(**dict(record.items()))
664                        logger.info("added %s" % record.items())
665                    except ValueError:
666                        dupplicate.append(**dict(record.items()))
667                        logger.info("duplicate %s" % record.items())
668                elif mode == "edit":
669                    try:
670                        self.modifyRecord(**dict(record.items()))
671                        edited.append(**dict(record.items()))
672                        logger.info("edited %s" % record.items())
673                    except KeyError:
674                        not_found.append(**dict(record.items()))
675                        logger.info("not found %s" % record.items())
676        invalid = d['invalid_records']
677        for itype in ("imported","edited","not_found","duplicate","invalid"):
678            outlist = locals[itype]
679            if len(outlist):
680                d = {}
681                for k in outlist[0].keys():
682                    d[k] = k
[2191]683                outlist[0] = d
[2185]684                outfile = open("file_name_%s" % itype,'w')
685                csv.DictWriter(outfile,outlist[0].keys()).writerows(outlist)
686                logger.info("wrote %(itype)s records to %(, written to %(not_imported_fn)s" % d)
687###)
688
[2094]689    security.declareProtected(ModifyPortalContent,"importCSV")###(
690    def importCSV(self,filename="JAMB_data",
691                  schema_id="application",
[2368]692                  layout_id="application_pde",
[2094]693                  mode='add'):
694        """ import JAMB data """
695        stool = getToolByName(self, 'portal_schemas')
696        ltool = getToolByName(self, 'portal_layouts')
697        schema = stool._getOb(schema_id)
698        if schema is None:
699            em = 'No such schema %s' % schema_id
700            logger.error(em)
701            return
702        layout = ltool._getOb(layout_id)
703        if layout is None:
704            em = 'No such layout %s' % layout_id
705            logger.error(em)
706            return
[2099]707        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
[2185]708        d = self._import_old(filename,schema,layout,mode,logger)
[2094]709        if len(d['valid_records']) > 0:
710            for record in d['valid_records']:
711                #import pdb;pdb.set_trace()
712                if mode == "add":
713                    self.addRecord(**dict(record.items()))
714                    logger.info("added %s" % record.items())
715                elif mode == "edit":
716                    self.modifyRecord(**dict(record.items()))
717                    logger.info("edited %s" % record.items())
718                else:
719                    logger.info("invalid mode: %s" % mode)
720        logger.info("%(mode)sed %(imported)d records, invalid written to %(not_imported_fn)s" % d)
721###)
722
723InitializeClass(ApplicantsCatalog)
724
725###)
726
class StudentsCatalog(WAeUPTable): ###(
    # Flat catalog of all students; fields are aggregated from the
    # student's sub-objects as described by affected_types below.
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Students Catalog'
    name = "students_catalog"
    key = "id"
    # Maps a content portal_type to the id of the student sub-object that
    # stores it and the catalog fields it contributes.  Presumably used to
    # decide which catalog columns to refresh when one of these objects
    # changes -- the consuming code is outside this excerpt; verify there.
    affected_types = {   ###(
                      'StudentApplication':
                      {'id': 'application',
                       'fields':
                       ('jamb_reg_no',
                        'entry_mode',
                        #'entry_level',
                        'entry_session',
                       )
                      },
                      'StudentClearance':
                      {'id': 'clearance',
                       'fields':
                       ('matric_no',
                        'lga',
                       )
                      },
                      'StudentPersonal':
                      {'id': 'personal',
                       'fields':
                       ('name',
                        'sex',
                        'perm_address',
                        'email',
                        'phone',
                       )
                      },
                      'StudentStudyCourse':
                      {'id': 'study_course',
                       'fields':
                       ('course', # study_course
                        'faculty', # from certificate
                        'department', # from certificate
                        'end_level', # from certificate
                        'level', # current_level
                        'mode',  # current_mode
                        'session', # current_session
                        'verdict', # current_verdict
                       )
                      },
                     }
    ###)
[1625]775
[2094]776    def __init__(self,name=None):
777        if name ==  None:
778            name = self.name
779        WAeUPTable.__init__(self, name)
[1620]780        return
[1625]781
[1700]782    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None):
783        """ clears the whole enchilada """
784        self._catalog.clear()
[971]785
[1700]786        if REQUEST and RESPONSE:
787            RESPONSE.redirect(
788              URL1 +
789              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Cleared')
[971]790
[1700]791    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
792        """ clear the catalog, then re-index everything """
793
794        elapse = time.time()
795        c_elapse = time.clock()
796
797        pgthreshold = self._getProgressThreshold()
798        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
799        self.refreshCatalog(clear=1, pghandler=handler)
800
801        elapse = time.time() - elapse
802        c_elapse = time.clock() - c_elapse
803
804        RESPONSE.redirect(
805            URL1 +
806            '/manage_catalogAdvanced?manage_tabs_message=' +
807            urllib.quote('Catalog Updated \n'
808                         'Total time: %s\n'
809                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
810    ###)
811
[2084]812    def fill_certificates_dict(self): ###(
[2078]813        "return certificate data in  dict"
814        certificates_brains = self.portal_catalog(portal_type ='Certificate')
815        d = {}
816        for cb in certificates_brains:
817            certificate_doc = cb.getObject().getContent()
818            cb_path = cb.getPath().split('/')
819            ld = {}
820            ld['faculty'] = cb_path[-4]
821            ld['department'] = cb_path[-3]
822            ld['end_level'] = getattr(certificate_doc,'end_level','999')
823            d[cb.getId] = ld
824        return d
[2084]825    ###)
826
[2078]827    def get_from_doc_department(self,doc,cached_data={}): ###(
[1620]828        "return the students department"
[1700]829        if doc is None:
[1620]830            return None
[2078]831        if cached_data.has_key(doc.study_course):
832            return cached_data[doc.study_course]['department']
[1700]833        certificate_res = self.portal_catalog(id = doc.study_course)
[1620]834        if len(certificate_res) != 1:
835            return None
836        return certificate_res[0].getPath().split('/')[-3]
837
[2078]838    def get_from_doc_faculty(self,doc,cached_data={}):
[1700]839        "return the students faculty"
840        if doc is None:
[1620]841            return None
[2078]842        if cached_data.has_key(doc.study_course):
843            return cached_data[doc.study_course]['faculty']
[1700]844        certificate_res = self.portal_catalog(id = doc.study_course)
845        if len(certificate_res) != 1:
846            return None
847        return certificate_res[0].getPath().split('/')[-4]
[1620]848
[2099]849    def get_from_doc_end_level(self,doc,cached_data={}):
[2069]850        "return the students end_level"
851        if doc is None:
852            return None
[2078]853        if cached_data.has_key(doc.study_course):
854            return cached_data[doc.study_course]['end_level']
[2069]855        certificate_res = self.portal_catalog(id = doc.study_course)
856        if len(certificate_res) != 1:
857            return None
858        return getattr(certificate_res[0].getObject().getContent(),'end_level','unknown')
859
[2078]860    def get_from_doc_level(self,doc,cached_data={}):
[1700]861        "return the students level"
862        if doc is None:
[1620]863            return None
[1700]864        return getattr(doc,'current_level',None)
[1620]865
[2078]866    def get_from_doc_mode(self,doc,cached_data={}):
[1705]867        "return the students mode"
[1700]868        if doc is None:
[1620]869            return None
[1705]870        cm = getattr(doc,'current_mode',None)
871        return cm
[1625]872
[1749]873
[2078]874    def get_from_doc_session(self,doc,cached_data={}):
[1705]875        "return the students current_session"
876        if doc is None:
877            return None
878        return getattr(doc,'current_session',None)
879
[2078]880    def get_from_doc_entry_session(self,doc,cached_data={}):
[1700]881        "return the students entry_session"
882        if doc is None:
[1620]883            return None
[1705]884        es = getattr(doc,'entry_session',None)
[1729]885        if es is not None and len(es) == 2:
[1705]886            return es
[1700]887        try:
888            digit = int(doc.jamb_reg_no[0])
889        except:
[1986]890            return "-1"
[1700]891        if digit < 8:
892            return "0%c" % doc.jamb_reg_no[0]
893        return "9%c" % doc.jamb_reg_no[0]
894
[2078]895    def get_from_doc_course(self,doc,cached_data={}):
[1620]896        "return the students study_course"
[1700]897        if doc is None:
[1620]898            return None
[1700]899        return getattr(doc,'study_course',None)
[1620]900
[2078]901    def get_from_doc_name(self,doc,cached_data={}):
[1620]902        "return the students name from the personal"
[1700]903        if doc is None:
[1620]904            return None
905        return "%s %s %s" % (doc.firstname,doc.middlename,doc.lastname)
906
[2078]907    def get_from_doc_verdict(self,doc,cached_data={}):
[1700]908        "return the students study_course"
909        if doc is None:
[1620]910            return None
[1700]911        return getattr(doc,'current_verdict',None)
[1702]912    ###)
[1620]913
[1702]914    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
915        if isinstance(name, str):
916            name = (name,)
[1749]917        reindextypes = {}
[1702]918        reindex_special = []
919        for n in name:
920            if n in ("review_state","registered_courses"):
921                reindex_special.append(n)
922            else:
923                for pt in self.affected_types.keys():
[1707]924                    if n in self.affected_types[pt]['fields']:
[1702]925                        if reindextypes.has_key(pt):
926                            reindextypes[pt].append(n)
927                        else:
928                            reindextypes[pt]= [n]
929                        break
[2078]930        cached_data = {}
931        if set(name).intersection(set(('faculty','department','end_level'))):
932            cached_data = self.fill_certificates_dict()
[1702]933        students = self.portal_catalog(portal_type="Student")
[1954]934        if hasattr(self,'portal_catalog_real'):
935            aq_portal = self.portal_catalog_real.evalAdvancedQuery
936        else:
937            aq_portal = self.portal_catalog.evalAdvancedQuery
[1702]938        num_objects = len(students)
939        if pghandler:
940            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
941        noattr = set(('StudentClearance','StudentPersonal')) & set(reindextypes.keys())
[2084]942        #import pdb;pdb.set_trace()
[1702]943        for i in xrange(num_objects):
944            if pghandler: pghandler.report(i)
945            student_brain = students[i]
[1707]946            student_object = student_brain.getObject()
[2084]947            # query = Eq('path',student_brain.getPath())
948            # sub_brains_list = aq_portal(query)
949            # sub_brains = {}
950            # for sub_brain in sub_brains_list:
951            #     sub_brains[sub_brain.portal_type] = sub_brain
952            # student_path = student_brain.getPath()
[1702]953            data = {}
954            modified = False
955            sid = data['id'] = student_brain.getId
956            if reindex_special and 'review_state' in reindex_special:
957                modified = True
958                data['review_state'] = student_brain.review_state
[1707]959            sub_objects = False
960            for pt in reindextypes.keys():
[1702]961                modified = True
[1707]962                try:
963                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
[2084]964                    #doc = sub_brains[pt].getObject().getContent()
965                    # path = "%s/%s" % (student_path,self.affected_types[pt]['id'])
966                    # doc = self.unrestrictedTraverse(path).getContent()
[1707]967                    sub_objects = True
968                except:
969                    continue
[2084]970                for field in set(name).intersection(self.affected_types[pt]['fields']):
[1707]971                    if hasattr(self,'get_from_doc_%s' % field):
[2078]972                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
973                                                                              cached_data=cached_data)
[1707]974                    else:
975                        data[field] = getattr(doc,field)
976            if not sub_objects and noattr:
977                import_res = self.returning_import(id = sid)
978                if not import_res:
979                    continue
980                import_record = import_res[0]
981                data['matric_no'] = import_record.matric_no
982                data['sex'] = import_record.Sex == 'F'
983                data['name'] = "%s %s %s" % (import_record.Firstname,
984                                             import_record.Middlename,
985                                             import_record.Lastname)
[1815]986                data['jamb_reg_no'] = import_record.Entryregno
[2454]987            #if reindex_special and 'registered_courses' in reindex_special:
988            #    try:
989            #        study_course = getattr(student_object,"study_course")
990            #        level_ids = study_course.objectIds()
991            #    except:
992            #        continue
993            #    if not level_ids:
994            #        continue
995            #    modified = True
996            #    level_ids.sort()
997            #    course_ids = getattr(study_course,level_ids[-1]).objectIds()
998            #    courses = []
999            #    for c in course_ids:
1000            #        if c.endswith('_co'):
1001            #            courses.append(c[:-3])
1002            #        else:
1003            #            courses.append(c)
1004            #    data['registered_courses'] = courses
[1702]1005            if modified:
1006                self.modifyRecord(**data)
1007        if pghandler: pghandler.finish()
1008    ###)
[1620]1009
1010    def refreshCatalog(self, clear=0, pghandler=None): ###(
1011        """ re-index everything we can find """
1012        students_folder = self.portal_url.getPortalObject().campus.students
1013        if clear:
[1724]1014            self._catalog.clear()
[1700]1015        students = self.portal_catalog(portal_type="Student")
1016        num_objects = len(students)
[2078]1017        cached_data = self.fill_certificates_dict()
[1620]1018        if pghandler:
1019            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1020        for i in xrange(num_objects):
1021            if pghandler: pghandler.report(i)
[1700]1022            student_brain = students[i]
1023            spath = student_brain.getPath()
[1727]1024            student_object = student_brain.getObject()
[1620]1025            data = {}
[1700]1026            sid = data['id'] = student_brain.getId
1027            data['review_state'] = student_brain.review_state
[1707]1028            sub_objects = False
1029            for pt in self.affected_types.keys():
1030                modified = True
1031                try:
1032                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1033                    sub_objects = True
1034                except:
[1727]1035                    #from pdb import set_trace;set_trace()
[1707]1036                    continue
1037                for field in self.affected_types[pt]['fields']:
1038                    if hasattr(self,'get_from_doc_%s' % field):
[2078]1039                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
1040                                                                              cached_data=cached_data)
[1707]1041                    else:
[1727]1042                        data[field] = getattr(doc,field,None)
1043            if not sub_objects:
[1700]1044                import_res = self.returning_import(id = sid)
1045                if not import_res:
[1620]1046                    continue
[1700]1047                import_record = import_res[0]
1048                data['matric_no'] = import_record.matric_no
1049                data['sex'] = import_record.Sex == 'F'
1050                data['name'] = "%s %s %s" % (import_record.Firstname,
1051                                             import_record.Middlename,
1052                                             import_record.Lastname)
[1815]1053                data['jamb_reg_no'] = import_record.Entryregno
[2454]1054            #else:
1055            #    study_course = getattr(student_object,'study_course',None)
1056            #    current_level = data.get('level',None)
1057            #    data['registered_courses'] = []
1058            #    if study_course and current_level and current_level in study_course.objectIds():
1059            #        level_obj = getattr(study_course,current_level)
1060            #        courses = []
1061            #        for c in level_obj.objectIds():
1062            #            if c.endswith('_co'):
1063            #                courses.append(c[:-3])
1064            #            else:
1065            #                courses.append(c)
1066            #        data['registered_courses'] = courses
[1700]1067            self.addRecord(**data)
[1620]1068        if pghandler: pghandler.finish()
1069    ###)
1070
[1700]1071    security.declarePrivate('notify_event_listener') ###(
[1620]1072    def notify_event_listener(self,event_type,object,infos):
1073        "listen for events"
[1716]1074        if not infos.has_key('rpath'):
1075            return
[1702]1076        pt = getattr(object,'portal_type',None)
1077        mt = getattr(object,'meta_type',None)
[1954]1078        students_catalog = self
[1702]1079        data = {}
1080        if pt == 'Student' and\
1081           mt == 'CPS Proxy Folder' and\
1082           event_type.startswith('workflow'):
1083            data['id'] = object.getId()
1084            data['review_state'] = self.portal_workflow.getInfoFor(object,'review_state',None)
1085            students_catalog.modifyRecord(**data)
1086            return
[1700]1087        rpl = infos['rpath'].split('/')
[2396]1088        if pt == 'Student' and mt == 'CPS Proxy Folder':
[1700]1089            student_id = object.id
[2396]1090            if event_type == "sys_add_object":
1091                try:
1092                    self.addRecord(id = student_id)
1093                except ValueError:
1094                    pass
1095                return
1096            elif event_type == 'sys_del_object':
1097                self.deleteRecord(student_id)
1098                #import pdb;pdb.set_trace()
[2454]1099        #elif pt == 'StudentCourseResult' and mt == 'CPS Proxy Folder':
1100        #    if event_type not in ("sys_add_object","sys_del_object"):
1101        #        return
1102        #    level_session = getattr(object.aq_parent.getContent(),'session','unknown')
1103        #    if level_session not in (self.getSessionId()[-2:],'2006/2007'):
1104        #        return
1105        #    course_id = object.getId()
1106        #    if course_id.endswith('_co'):
1107        #        course_id = course_id[:-3]
1108        #    student_id = object.absolute_url_path().split('/')[-4]
1109        #    res = students_catalog(id = student_id)
1110        #    if not res:
1111        #        return
1112        #    student_rec = res[0]
1113        #    registered_courses = getattr(student_rec,'registered_courses',None)
1114        #    if not registered_courses:
1115        #        registered_courses = []
1116        #    if event_type == "sys_add_object":
1117        #        if course_id not in registered_courses:
1118        #            registered_courses.append(course_id)
1119        #        else:
1120        #            return
1121        #    elif registered_courses and event_type == "sys_del_object":
1122        #        removed = False
1123        #        while course_id in registered_courses:
1124        #            removed = True
1125        #            registered_courses.remove(course_id)
1126        #        if not removed:
1127        #            return
1128        #    data['id'] = student_id
1129        #    data['registered_courses'] = registered_courses
1130        #    self.modifyRecord(record = student_rec, **data)
1131        #    return
[1716]1132        if pt not in self.affected_types.keys():
[1700]1133            return
[1716]1134        if event_type not in ('sys_modify_object'):
1135            return
[1700]1136        if mt == 'CPS Proxy Folder':
1137            return
[1716]1138        for field in self.affected_types[pt]['fields']:
[1700]1139            if hasattr(self,'get_from_doc_%s' % field):
1140                data[field] = getattr(self,'get_from_doc_%s' % field)(object)
1141            else:
1142                data[field] = getattr(object,field)
1143        data['id'] = rpl[2]
[1716]1144        self.modifyRecord(**data)
[1700]1145    ###)
[1620]1146
[1625]1147
[971]1148InitializeClass(StudentsCatalog)
1149
[1146]1150###)
1151
class CoursesCatalog(WAeUPTable): ###(
    """Flat ZCatalog of courses, keyed by course code.

    Records carry the course document's schema fields plus 'faculty' and
    'department', both derived from the course's position in the
    .../academics/<faculty>/<department>/... folder hierarchy.
    """
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Courses Catalog'
    name =  "courses_catalog"
    key = "code"
    def __init__(self,name=None):
        # Default to the class-level catalog name when none is supplied.
        if name ==  None:
            name =  self.name
        WAeUPTable.__init__(self, name)

    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
        """ clear the catalog, then re-index everything """

        elapse = time.time()
        c_elapse = time.clock()

        pgthreshold = self._getProgressThreshold()
        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
        self.refreshCatalog(clear=1, pghandler=handler)

        elapse = time.time() - elapse
        c_elapse = time.clock() - c_elapse

        RESPONSE.redirect(
            URL1 +
            '/manage_catalogAdvanced?manage_tabs_message=' +
            urllib.quote('Catalog Updated \n'
                         'Total time: %s\n'
                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
    ###)

    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
        # Re-index only the given field name(s) for every Course object;
        # key/faculty/department are always refreshed from the brain/path.
        if isinstance(name, str):
            name = (name,)
        courses = self.portal_catalog(portal_type="Course")
        num_objects = len(courses)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            course_brain = courses[i]
            course_object = course_brain.getObject()
            # path layout: .../academics/<faculty>/<department>/<course>
            pl = course_brain.getPath().split('/')
            data = {}
            cid = data[self.key] = course_brain.getId
            data['faculty'] = pl[-4]
            data['department'] = pl[-3]
            doc = course_object.getContent()
            for field in name:
                if field not in (self.key,'faculty','department'):
                    data[field] = getattr(doc,field)
            self.modifyRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    def refreshCatalog(self, clear=0, pghandler=None): ###(
        """ re-index everything we can find """
        if clear:
            self._catalog.clear()
        courses = self.portal_catalog(portal_type="Course")
        num_objects = len(courses)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        #from pdb import set_trace;set_trace()
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            course_brain = courses[i]
            course_doc = course_brain.getObject().getContent()
            pl = course_brain.getPath().split('/')
            data = {}
            # copy every schema field from the course document
            for field in self.schema():
                data[field] = getattr(course_doc,field,None)
            data[self.key] = course_brain.getId
            # faculty/department are the two path segments after 'academics'
            ai = pl.index('academics')
            data['faculty'] = pl[ai +1]
            data['department'] = pl[ai +2]
            if clear:
                self.addRecord(**data)
            else:
                self.modifyRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    security.declarePrivate('notify_event_listener') ###(
    def notify_event_listener(self,event_type,object,infos):
        "listen for events"
        # Keeps the catalog in sync on add/modify/delete of Course objects.
        if not infos.has_key('rpath'):
            return
        pt = getattr(object,'portal_type',None)
        mt = getattr(object,'meta_type',None)
        if pt != 'Course':
            return
        data = {}
        rpl = infos['rpath'].split('/')
        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
            return
        course_id = object.getId()
        data[self.key] = course_id
        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
            # add a stub record first; ValueError means it already exists
            try:
                self.addRecord(**data)
            except ValueError:
                return
            course_id = object.getId()
            doc = object.getContent()
            if doc is None:
                return
            for field in self.schema():
                data[field] = getattr(doc,field,None)
            data[self.key] = course_id
            ai = rpl.index('academics')
            data['faculty'] = rpl[ai +1]
            data['department'] = rpl[ai +2]
            self.modifyRecord(**data)
            return
        if event_type == "sys_del_object":
            self.deleteRecord(course_id)
            return
        if event_type == "sys_modify_object" and mt == 'Course':
            #from pdb import set_trace;set_trace()
            # here 'object' is the course document, so its parent holds the id
            for field in self.schema():
                data[field] = getattr(object,field,None)
            course_id = object.aq_parent.getId()
            data[self.key] = course_id
            ai = rpl.index('academics')
            data['faculty'] = rpl[ai +1]
            data['department'] = rpl[ai +2]
            self.modifyRecord(**data)
    ###)


InitializeClass(CoursesCatalog)
###)
[1146]1286
class CourseResults(WAeUPTable): ###(
    """Catalog of per-student course results.

    Records are keyed by "student_id|level_id|course_id" and mirror the
    course-result documents formerly stored inside level folders (see
    moveResultsHere).
    """
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Results Catalog'
    name = "course_results"
    key = "key" #student_id + level + course_id
    def __init__(self,name=None):
        # Default to the class-level catalog name when none is supplied.
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)
        self._queue = []

    def addMultipleRecords(self, records): ###(
        """add many records"""
        # NOTE(review): added_keys is never appended to, the method returns
        # only the uid of the *last* record, and raises NameError when
        # records is empty -- confirm callers rely only on the side effect.
        added_keys = []
        for data in records:
            uid = key = "%(student_id)s|%(level_id)s|%(course_id)s" % data
            data['%s' % self.key] = uid
            res = self.searchResults({"%s" % self.key : uid})
            if len(res) > 0:
                raise ValueError("More than one record with uid %s" % uid)
            self.catalog_object(dict2ob(data), uid=uid)
        return uid
    ###)

    def deleteResultsHere(self,level_id,student_id): ###(
        # Remove every course-result record of this student at this level.
        #import pdb;pdb.set_trace()
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        for result in course_results:
            self.deleteRecord(result.key)
    ###)

    def moveResultsHere(self,level,student_id): ###(
        # Migrate the course-result objects stored inside the given level
        # folder into this catalog, then delete them from the folder.
        # A '_co' id suffix marks a carry-over course.
        #import pdb;pdb.set_trace()
        level_id = level.getId()
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        existing_courses = [cr.code for cr in course_results]
        to_delete = []
        for code,obj in level.objectItems():
            to_delete.append(code)
            carry_over = False
            if code.endswith('_co'):
                carry_over = True
                code  = code[:-3]
            if code in existing_courses:
                # already cataloged; still deleted from the folder below
                continue
            course_result_doc = obj.getContent()
            data = {}
            course_id = code
            for field in self.schema():
                data[field] = getattr(course_result_doc,field,'')
            data['key'] = key = "%(student_id)s|%(level_id)s|%(course_id)s" % vars()
            data['student_id'] = student_id
            data['level_id'] = level_id
            # getLevelSession is acquired from elsewhere in the product --
            # presumably resolves the session of this level; verify there.
            session_id = self.getLevelSession(level.getContent(),student_id,level_id)
            data['session_id'] = session_id
            #data['queue_status'] = OBJECT_CREATED
            data['code'] = course_id
            data['carry_over'] = carry_over
            self.catalog_object(dict2ob(data), uid=key)
        level.manage_delObjects(to_delete)
    ###)

    def getCourses(self,student_id,level_id): ###(
        # Return (total credits, carry-over rows, normal rows) for the
        # student at the given level; both lists are dicts of schema fields
        # sorted by semester+code.
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        carry_overs = []
        normal = []
        credits = 0
        for brain in course_results:
            d = {}
            # NOTE(review): int(brain.credits) will raise if credits is
            # empty/non-numeric -- presumably records always carry a number.
            credits += int(brain.credits)
            for field in self.schema():
                d[field] = getattr(brain,field,'')
            #d['sheduled'] = brain.queue_status == ADDING_SHEDULED
            d['coe'] = 'Elective'
            if brain.core_or_elective:
                d['coe'] = 'Core'
            id = code = d['id'] = brain.code
            d['code'] = code
            d['title'] = self.courses_catalog.evalAdvancedQuery(Eq('code',code))[0].title
            if brain.carry_over:
                d['coe'] = 'Carry-Over'
                carry_overs.append(d)
            else:
                normal.append(d)
        normal.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
                                        "%(semester)s%(code)s" % y))
        carry_overs.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
                                             "%(semester)s%(code)s" % y))
        return credits,carry_overs,normal
    ###)

InitializeClass(CourseResults)
###)
1384
class OnlinePaymentsImport(WAeUPTable): ###(
    """Table of online payment transactions, keyed by order_id."""

    meta_type = 'WAeUP Online Payment Transactions'
    name = "online_payments_import"
    key = "order_id"

    def __init__(self,name=None):
        # Fall back to the class-level table name when none is given.
        table_name = name
        if table_name is None:
            table_name = self.name
        WAeUPTable.__init__(self, table_name)


InitializeClass(OnlinePaymentsImport)
###)
1398
class ReturningImport(WAeUPTable): ###(
    """Import table for returning students, keyed by matriculation number."""

    meta_type = 'Returning Import Table'
    name = "returning_import"
    key = "matric_no"

    def __init__(self,name=None):
        # Use the class default unless an explicit table name was passed.
        chosen = name
        if chosen is None:
            chosen = self.name
        WAeUPTable.__init__(self, chosen)


InitializeClass(ReturningImport)
###)
[1146]1412
class ResultsImport(WAeUPTable): ###(
    """Import table for examination results; records are keyed by 'key'."""

    meta_type = 'Results Import Table'
    name = "results_import"
    key = "key"

    def __init__(self,name=None):
        # An omitted name means: use the class-level default.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(ResultsImport)

###)
1427
class PaymentsCatalog(WAeUPTable): ###(
    # NOTE(review): 'name' is "students_catalog" and 'key' is "id" -- both
    # identical to StudentsCatalog above.  This looks like a copy/paste slip
    # (one would expect e.g. "payments_catalog"); confirm with the product's
    # registration code before relying on this class.

    meta_type = 'WAeUP Payments Catalog'
    name = "students_catalog"
    key = "id"
    def __init__(self,name=None):
        # Default to the class-level catalog name when none is supplied.
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(PaymentsCatalog)

###)
1442
# BBB: backward-compatibility alias -- keep the historical misspelling
# 'AccomodationTable' importable for old code that still uses it.
AccomodationTable = AccommodationTable
Note: See TracBrowser for help on using the repository browser.