source: WAeUP_SRP/trunk/WAeUPTables.py @ 5608

Last change on this file since 5608 was 5583, checked in by Henrik Bettermann, 14 years ago

Extend Moodle data export file.

  • Property svn:keywords set to Id
File size: 74.0 KB
1#-*- mode: python; mode: fold -*-
2# (C) Copyright 2005 AixtraWare <http://aixtraware.de>
3# Author: Joachim Schmitz <js@aixtraware.de>
4#
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License version 2 as published
7# by the Free Software Foundation.
8#
9# This program is distributed in the hope that it will be useful,
10# but WITHOUT ANY WARRANTY; without even the implied warranty of
11# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12# GNU General Public License for more details.
13#
14# You should have received a copy of the GNU General Public License
15# along with this program; if not, write to the Free Software
16# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
17# 02111-1307, USA.
18#
19# $Id: WAeUPTables.py 5583 2010-11-10 08:57:15Z henrik $
20
21from zope.interface import implements
22from Globals import InitializeClass
23from Products.ZCatalog.ZCatalog import ZCatalog
24from Products.ZCatalog.ProgressHandler import ZLogHandler
25from AccessControl import ClassSecurityInfo
26from Products.CMFCore.permissions import ModifyPortalContent
27from Products.CMFCore.utils import getToolByName
28from Products.CMFCore.CatalogTool import CatalogTool
29from Products.CPSSchemas.StorageAdapter import MappingStorageAdapter
30from Products.CPSSchemas.DataStructure import DataStructure
31from Products.CPSSchemas.DataModel import DataModel
32from Products.AdvancedQuery import Eq, Between, Le,In
33import urllib
34import DateTime,time
35import csv,re,os
36import logging
37import Globals
38p_home = Globals.package_home(globals())
39i_home = Globals.INSTANCE_HOME
40
41ADDING_SHEDULED = "adding_sheduled"
42OBJECT_CREATED = "object_created"
43NOT_OCCUPIED = 'not_occupied'
44
45from interfaces import IWAeUPTable
46
47class AttributeHolder(object):
48    pass
49
50def dict2ob(dict):
51    ob = AttributeHolder()
52    for key, value in dict.items():
53        setattr(ob, key, value)
54    return ob
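# Illustrative sketch (not part of the original module): dict2ob wraps a plain
# dict in an AttributeHolder so that ZCatalog.catalog_object can read the
# values as attributes when indexing, e.g.:
#
#   rec = dict2ob({'bed': 'A1', 'student': NOT_OCCUPIED})
#   rec.bed       # -> 'A1'
#   rec.student   # -> 'not_occupied'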
55
56class WAeUPTable(ZCatalog): ###(
57
58    implements(IWAeUPTable)
59    security = ClassSecurityInfo()
60    meta_type = None
61
62    def __init__(self,name=None):
63        if name ==  None:
64            name = self.name
65        ZCatalog.__init__(self,name)
66
67    def refreshCatalog(self, clear=0, pghandler=None): ###(
68        """ don't refresh for a normal table """
69
70        if self.REQUEST and self.REQUEST.RESPONSE:
71            self.REQUEST.RESPONSE.redirect(
72              URL1 +
73              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20refresh%20not%20implemented')
74
75###)
76
77    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None): ###(
78        """ clears the whole enchilada """
79
80        #if REQUEST and RESPONSE:
81        #    RESPONSE.redirect(
82        #      URL1 +
83        #      '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Clearing%20disabled')
84
85        self._catalog.clear()
86        if REQUEST and RESPONSE:
87            RESPONSE.redirect(
88              URL1 +
89              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20cleared')
90
91###)
92
93    def record2dict(self,fields,record,index): ###(
94        d = {}
95        for key in fields:
96            v = getattr(record, key, None)
97            v_dump = v
98            if key == 'sex':
99                if v == True:
100                    v_dump = 'F'
101                elif v == False:
102                    v_dump = 'M'
103                d[key] = v_dump
104            elif v:
105                if index == 'translate':
106                    if key == 'lga':
107                        v_dump = self.portal_vocabularies.local_gov_areas.get(v)
108                        if not v_dump:
109                            v_dump = v
110                    elif key == 'aos':
111                        v_dump = self.portal_vocabularies.aos.get(v)
112                d[key] = v_dump
113            else:
114                d[key] = ''
115        return d
116
117###)
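    # Example of record2dict behaviour (illustrative, derived from the code above):
    # a boolean 'sex' value is rendered as 'F' (True) or 'M' (False), empty values
    # become '', and with index == 'translate' the 'lga' and 'aos' codes are looked
    # up in the portal vocabularies, e.g.:
    #
    #   self.record2dict(('sex','lga'), record, 'translate')
    #   # -> {'sex': 'F', 'lga': '<vocabulary term for the stored lga code>'}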
118
119    def addRecord(self, **data): ###(
120        # The uid is the value of the record's key field (self.key), e.g. 'bed' for the accommodation table.
121        uid = data[self.key]
122        res = self.searchResults({"%s" % self.key : uid})
123        if len(res) > 0:
124            raise ValueError("More than one record with uid %s" % uid)
125        self.catalog_object(dict2ob(data), uid=uid)
126        return uid
127
128###)
129
130    def deleteRecord(self, uid):
131        self.uncatalog_object(uid)
132
133    def getRecordByKey(self,key):
134        if not key:
135            return None
136        res = self.evalAdvancedQuery(Eq(self.key,key))
137        if res:
138            return res[0]
139        return None
140
141    def searchAndSetRecord(self, **data):
142        raise NotImplementedError
143
144    def modifyRecord(self, record=None, **data): ###(
145        #records = self.searchResults(uid=uid)
146        uid = data[self.key]
147        if record is None:
148            records = self.searchResults({"%s" % self.key : uid})
149            if len(records) > 1:
150                # Can not happen, but anyway...
151                raise ValueError("More than one record with uid %s" % uid)
152            if len(records) == 0:
153                raise KeyError("No record for uid %s" % uid)
154            record = records[0]
155        record_data = {}
156        for field in self.schema() + self.indexes():
157            record_data[field] = getattr(record, field)
158        # Add the updated data:
159        record_data.update(data)
160        self.catalog_object(dict2ob(record_data), uid)
161
162###)
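    # Illustrative usage sketch (not part of the original code): a WAeUPTable acts
    # as a keyed record store on top of ZCatalog metadata; the accommodation table
    # (key = 'bed') is used here only as an example.
    #
    #   uid = table.addRecord(bed='A1', student=NOT_OCCUPIED)   # uid == data[table.key]
    #   brain = table.getRecordByKey(uid)                       # catalog brain or None
    #   table.modifyRecord(bed='A1', student='M123456')         # merge old metadata + new data, recatalog
    #   table.deleteRecord(uid)                                 # uncatalog the record
    #
    # addRecord raises ValueError if a record with the same key already exists.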
163
164    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
165        if isinstance(name, str):
166            name =  (name,)
167        paths = self._catalog.uids.items()
168        i = 0
169        #import pdb;pdb.set_trace()
170        for p,rid in paths:
171            i += 1
172            metadata = self.getMetadataForRID(rid)
173            record_data = {}
174            for field in name:
175                record_data[field] = metadata.get(field)
176            uid = metadata.get(self.key)
177            self.catalog_object(dict2ob(record_data), uid, idxs=name,
178                                update_metadata=0)
179
180###)
181
182    security.declareProtected(ModifyPortalContent,"exportAllRecords") ###(
183    def exportAllRecords(self):
184        "export a WAeUPTable"
185        #import pdb;pdb.set_trace()
186        fields = [field for field in self.schema()]
187        format = ','.join(['"%%(%s)s"' % fn for fn in fields])
188        csv = []
189        csv.append(','.join(['"%s"' % fn for fn in fields]))
190        for uid in self._catalog.uids:
191            records = self.searchResults({"%s" % self.key : uid})
192            if len(records) > 1:
193                # Can not happen, but anyway...
194                raise ValueError("More than one record with uid %s" % uid)
195            if len(records) == 0:
196                raise KeyError("No record for uid %s" % uid)
197            rec = records[0]
198            csv.append(format % rec)
199        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
200        open("%s/import/%s-%s.csv" % (i_home,self.getId(),current),"w+").write('\n'.join(csv))
201
202###)
203
204    security.declareProtected(ModifyPortalContent,"dumpAll")###(
205    def dumpAll(self,index=None,value=None):
206        """dump all data in the table to a csv"""
207        member = self.portal_membership.getAuthenticatedMember()
208        logger = logging.getLogger('WAeUPTables.WAeUPTable.dumpAll')
209        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
210        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
211        res_list = []
212        lines = []
213        if hasattr(self,"export_keys"):
214            fields = self.export_keys
215        else:
216            fields = []
217            for f in self.schema():
218                fields.append(f)
219        headline = ','.join(fields)
220        out = open(export_file,"wb")
221        out.write(headline +'\n')
222        out.close()
223        out = open(export_file,"a")
224        csv_writer = csv.DictWriter(out,fields,)
225        if index is not None and value is not None:
226            records = self.evalAdvancedQuery(Eq(index,value))
227        else:
228            records = self()
229        nr2export = len(records)
230        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
231        chunk = 2000
232        total = 0
233        start = DateTime.DateTime().timeTime()
234        start_chunk = DateTime.DateTime().timeTime()
235        for record in records:
236            not_all = False
237            d = self.record2dict(fields,record,index)
238            lines.append(d)
239            total += 1
240            if total and not total % chunk or total == len(records):
241                csv_writer.writerows(lines)
242                anz = len(lines)
243                logger.info("wrote %(anz)d  total written %(total)d" % vars())
244                end_chunk = DateTime.DateTime().timeTime()
245                duration = end_chunk-start_chunk
246                per_record = duration/anz
247                till_now = end_chunk - start
248                average_per_record = till_now/total
249                estimated_end = DateTime.DateTime(start + average_per_record * nr2export)
250                estimated_end = estimated_end.strftime("%H:%M:%S")
251                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
252                start_chunk = DateTime.DateTime().timeTime()
253                lines = []
254        end = DateTime.DateTime().timeTime()
255        logger.info('total time %6.2f m' % ((end-start)/60))
256        import os
257        filename, extension = os.path.splitext(export_file)
258        from subprocess import call
259        msg = "wrote %(total)d records to %(export_file)s" % vars()
260        #try:
261        #    retcode = call('gzip %s' % (export_file),shell=True)
262        #    if retcode == 0:
263        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
264        #except OSError, e:
265        #    retcode = -99
266        #    logger.info("zip failed with %s" % e)
267        logger.info(msg)
268        args = {'portal_status_message': msg}
269        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
270        url = self.REQUEST.get('URL2')
271        return 'ready'
272        #return self.REQUEST.RESPONSE.redirect(url)
273    ###)
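    # Note on the progress estimate above (illustrative numbers): records are written
    # in chunks of 2000; after each chunk the average time per record so far is
    # extrapolated to the whole export, e.g. if the first 2000 records took 4 seconds,
    # per_record is 0.002 s and for 10000 records the estimated end is
    # start + 0.002 * 10000 = start + 20 seconds.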
274
275
276    security.declarePrivate("_import") ###(
277    def _import_new(self,csv_items,schema,layout,mode,logger,filename='',member=''):
278        "import data from csv.Dictreader Instance"
279        start = True
280        tr_count = 1
281        total_imported = 0
282        total_not_imported = 0
283        total = 0
284        iname =  "%s" % filename
285        not_imported = []
286        valid_records = []
287        invalid_records = []
288        duplicate_records = []
289        d = {}
290        d['mode'] = mode
291        d['valid_records'] = valid_records
292        d['invalid_records'] = invalid_records
293        d['duplicate_records'] = duplicate_records
294        # d['import_fn'] = import_fn
295        # d['imported_fn'] = imported_fn
296        # d['not_imported_fn'] = not_imported_fn
297        validators = {}
298        for widget in layout.keys():
299            try:
300                validators[widget] = layout[widget].validate
301            except AttributeError:
302                logger.info('%s has no validate attribute' % widget)
303                return d
304        for item in csv_items:
305            if start:
306                start = False
307                logger.info('%s starts import from %s.csv' % (member,filename))
308                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
309                attrs = csv.reader(open("%s/import/%s.csv" % (i_home,filename),"rb")).next()
310                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
311                diff2schema = set(import_keys).difference(set(schema.keys()))
312                diff2layout = set(import_keys).difference(set(layout.keys()))
313                if diff2layout:
314                    em = "not ignorable key(s) %s found in heading" % diff2layout
315                    logger.info(em)
316                    return d
317                # s = ','.join(['"%s"' % fn for fn in import_keys])
318                # open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
319                # #s = '"id",' + s
320                # open(imported_fn,"a").write(s + '\n')
321                # format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
322                # format_error = format + ',"%(Error)s"'
323                # #format = '"%(id)s",'+ format
324                adapters = [MappingStorageAdapter(schema, item)]
325            dm = DataModel(item, adapters,context=self)
326            ds = DataStructure(data=item,datamodel=dm)
327            error_string = ""
328            for k in import_keys:
329                if not validators[k](ds,mode=mode):
330                    error_string += " %s : %s" % (k,ds.getError(k))
331            if error_string:
332                item['Error'] = error_string
333                #invalid_records.append(dm)
334                invalid_records.append(item)
335                total_not_imported += 1
336            else:
337                #em = format % item
338                valid_records.append(dm)
339                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
340                tr_count += 1
341                total_imported += 1
342            total += 1
343        # if len(imported) > 0:
344        #     open(imported_fn,"a").write('\n'.join(imported))
345        # if len(not_imported) > 0:
346        #     open(not_imported_fn,"a").write('\n'.join(not_imported))
347        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
348        d['imported'] = total_imported
349        d['not_imported'] = total_not_imported
350        d['valid_records'] = valid_records
351        d['invalid_records'] = invalid_records
352        return d
353    ###)
354
355    security.declarePublic("missingValue")###(
356    def missingValue(self):
357        from Missing import MV
358        return MV
359    ###)
360###)
361
362class AccommodationTable(WAeUPTable): ###(
363
364    meta_type = 'WAeUP Accommodation Tool'
365    name = "portal_accommodation"
366    key = "bed"
367    not_occupied = NOT_OCCUPIED
368    def __init__(self,name=None):
369        if name ==  None:
370            name = self.name
371        WAeUPTable.__init__(self, name)
372
373    def searchAndReserveBed(self, student_id,bed_type,random_order=False): ###(
374        logger = logging.getLogger('WAeUPTables.AccommodationTable.searchAndReserveBed')
375        records = self.evalAdvancedQuery(Eq('student',student_id))
376        if len(records) == 1:
377            #return -1,"Student with Id %s already booked bed %s." % (student_id,records[0].bed)
378            logger.info('%s found (reserved) bed %s' % (student_id,records[0].bed))
379            return -1,records[0].bed
380        elif len(records) > 1:
381            logger.info('%s found more than one (reserved) bed' % (student_id))
382            return -3,'more than one bed'
383        query = Eq('bed_type',bed_type) & Eq('student',NOT_OCCUPIED)
384        records = self.evalAdvancedQuery(query,sortSpecs=('sort_id','bed'))
385        if len(records) == 0:
386            logger.info('no bed %s available for %s' % (bed_type,student_id))
387            return -2,"no bed"
388        if random_order:
389            import random
390            bed_no = random.randint(0,len(records)-1)
391        else:
392            bed_no = 0
393        rec = records[bed_no]
394        self.modifyRecord(bed=rec.bed,student=student_id)
395        logger.info('%s booked bed %s' % (student_id,rec.bed))
396        return 1,rec.bed
397    ###)
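    # Return value convention of searchAndReserveBed (derived from the code above):
    #
    #   ( 1, bed)                 - a free bed was reserved for the student
    #   (-1, bed)                 - the student already holds a reservation
    #   (-2, "no bed")            - no free bed of the requested bed_type
    #   (-3, 'more than one bed') - inconsistent data, several reservations found
    #
    # Illustrative call: code, bed = context.portal_accommodation.searchAndReserveBed(student_id, bed_type)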
398
399
400InitializeClass(AccommodationTable)
401
402###)
403
404class PinTable(WAeUPTable): ###(
405    from ZODB.POSException import ConflictError
406    security = ClassSecurityInfo()
407    meta_type = 'WAeUP Pin Tool'
408    name = "portal_pins"
409    key = 'pin'
410
411    def __init__(self,name=None):
412        if name ==  None:
413            name = self.name
414        WAeUPTable.__init__(self, name)
415
416    security.declareProtected(ModifyPortalContent,"dumpAll")###(
417    def dumpAll(self,include_unused=None,index=None):
418        """dump all data in the table to a csv"""
419        member = self.portal_membership.getAuthenticatedMember()
420        logger = logging.getLogger('WAeUPTables.PinTable.dumpAll')
421        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
422        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
423        res_list = []
424        lines = []
425        if hasattr(self,"export_keys"):
426            fields = self.export_keys
427        else:
428            fields = []
429            for f in self.schema():
430                fields.append(f)
431        headline = ','.join(fields)
432        out = open(export_file,"wb")
433        out.write(headline +'\n')
434        out.close()
435        out = open(export_file,"a")
436        csv_writer = csv.DictWriter(out,fields,)
437        if include_unused is not None and str(member) not in ('admin',):
438            logger.info('%s tries to dump pintable with unused pins' % (member))
439            return
440        if include_unused is not None:
441            records = self()
442        else:
443            records = self.evalAdvancedQuery(~Eq('student',''))
444        nr2export = len(records)
445        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
446        chunk = 2000
447        total = 0
448        start = DateTime.DateTime().timeTime()
449        start_chunk = DateTime.DateTime().timeTime()
450        for record in records:
451            not_all = False
452            d = self.record2dict(fields,record,index)
453            lines.append(d)
454            total += 1
455            if total and not total % chunk or total == len(records):
456                csv_writer.writerows(lines)
457                anz = len(lines)
458                logger.info("wrote %(anz)d  total written %(total)d" % vars())
459                end_chunk = DateTime.DateTime().timeTime()
460                duration = end_chunk-start_chunk
461                per_record = duration/anz
462                till_now = end_chunk - start
463                average_per_record = till_now/total
464                estimated_end = DateTime.DateTime(start + average_per_record * nr2export)
465                estimated_end = estimated_end.strftime("%H:%M:%S")
466                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
467                start_chunk = DateTime.DateTime().timeTime()
468                lines = []
469        end = DateTime.DateTime().timeTime()
470        logger.info('total time %6.2f m' % ((end-start)/60))
471        import os
472        filename, extension = os.path.splitext(export_file)
473        from subprocess import call
474        msg = "wrote %(total)d records to %(export_file)s" % vars()
475        #try:
476        #    retcode = call('gzip %s' % (export_file),shell=True)
477        #    if retcode == 0:
478        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
479        #except OSError, e:
480        #    retcode = -99
481        #    logger.info("zip failed with %s" % e)
482        logger.info(msg)
483        args = {'portal_status_message': msg}
484        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
485        url = self.REQUEST.get('URL2')
486        return self.REQUEST.RESPONSE.redirect(url)
487    ###)
488
489
490
491    def searchAndSetRecord(self, uid, student_id,prefix):
492
493        # The following line must be activated after resetting
494        # the portal_pins table. This is to avoid duplicate entries
495        # and disable duplicate payments.
496
497        #student_id = student_id.upper()
498
499        #records = self.searchResults(student = student_id)
500        #if len(records) > 0 and prefix in ('CLR','APP'):
501        #    for r in records:
502        #        if r.pin != uid and r.prefix_batch.startswith(prefix):
503        #            return -2
504        records = self.searchResults({"%s" % self.key : uid})
505        if len(records) > 1:
506            # Can not happen, but anyway...
507            raise ValueError("More than one record with uid %s" % uid)
508        if len(records) == 0:
509            return -1,None
510        record = records[0]
511        if record.student == "":
512            record_data = {}
513            for field in self.schema() + self.indexes():
514                record_data[field] = getattr(record, field)
515            # Add the updated data:
516            record_data['student'] = student_id
517            try:
518                self.catalog_object(dict2ob(record_data), uid)
519                return 1,record
520            except ConflictError:
521                return 2,record
522        if record.student.upper() != student_id.upper():
523            return 0,record
524        if record.student.upper() == student_id.upper():
525            return 2,record
526        return -3,record
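    # Return value convention of searchAndSetRecord (derived from the code above):
    #   (-1, None)   - no pin with this uid exists
    #   ( 1, record) - pin was unused and has now been assigned to student_id
    #   ( 2, record) - pin already belongs to this student, or a write conflict occurred
    #   ( 0, record) - pin is already used by a different student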
527InitializeClass(PinTable)
528###)
529
530class PumeResultsTable(WAeUPTable): ###(
531
532    meta_type = 'WAeUP PumeResults Tool'
533    name = "portal_pumeresults"
534    key = "jamb_reg_no"
535    def __init__(self,name=None):
536        if name ==  None:
537            name = self.name
538        WAeUPTable.__init__(self, name)
539
540
541InitializeClass(PumeResultsTable)
542
543###)
544
545class ApplicantsCatalog(WAeUPTable): ###(
546
547    meta_type = 'WAeUP Applicants Catalog'
548    name = "applicants_catalog"
549    key = "reg_no"
550    security = ClassSecurityInfo()
551    #export_keys = (
552    #               "reg_no",
553    #               "status",
554    #               "lastname",
555    #               "sex",
556    #               "date_of_birth",
557    #               "lga",
558    #               "email",
559    #               "phone",
560    #               "passport",
561    #               "entry_mode",
562    #               "pin",
563    #               "screening_type",
564    #               "registration_date",
565    #               "testdate",
566    #               "application_date",
567    #               "screening_date",
568    #               "faculty",
569    #               "department",
570    #               "course1",
571    #               "course2",
572    #               "course3",
573    #               "eng_score",
574    #               "subj1",
575    #               "subj1score",
576    #               "subj2",
577    #               "subj2score",
578    #               "subj3",
579    #               "subj3score",
580    #               "aggregate",
581    #               "course_admitted",
582    #               )
583
584    def __init__(self,name=None):
585        if name ==  None:
586            name = self.name
587        WAeUPTable.__init__(self, name)
588
589    security.declareProtected(ModifyPortalContent,"new_importCSV")###(
590    def new_importCSV(self,filename="JAMB_data",
591                  schema_id="application",
592                  layout_id="import_application",
593                  mode='add'):
594        """ import JAMB data """
595        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
596        pm = self.portal_membership
597        member = pm.getAuthenticatedMember()
598        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
599        lock_fn = "%s/import/%s_import_lock" % (i_home,filename)
600        import_fn = "%s/import/%s.csv" % (i_home,filename)
601        if mode not in ('add','edit'):
602            logger.info("invalid mode: %s" % mode)
603        if os.path.exists(lock_fn):
604            logger.info("import of %(import_fn)s already in progress" % vars())
605            return
606        lock_file = open(lock_fn,"w")
607        lock_file.write("%(current)s \n" % vars())
608        lock_file.close()
609        invalid_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
610        duplicate_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
611        stool = getToolByName(self, 'portal_schemas')
612        ltool = getToolByName(self, 'portal_layouts')
613        schema = stool._getOb(schema_id)
614        if schema is None:
615            em = 'No such schema %s' % schema_id
616            logger.error(em)
617            return
618        for postfix in ('_import',''):
619            layout_name = "%(layout_id)s%(postfix)s" % vars()
620            if hasattr(ltool,layout_name):
621                break
622        layout = ltool._getOb(layout_name)
623        if layout is None:
624            em = 'No such layout %s' % layout_id
625            logger.error(em)
626            return
627        try:
628            csv_file = csv.DictReader(open(import_fn,"rb"))
629        except:
630            em = 'Error reading %s.csv' % filename
631            logger.error(em)
632            return
633        d = self._import_new(csv_file,schema,layout,mode,logger,filename,member)
634        imported = []
635        edited = []
636        duplicates = []
637        not_found = []
638        if len(d['valid_records']) > 0:
639            for record in d['valid_records']:
640                #import pdb;pdb.set_trace()
641                if mode == "add":
642                    try:
643                        self.addRecord(**dict(record.items()))
644                        imported.append(dict(record.items()))
645                        logger.info("added %s" % record.items())
646                    except ValueError:
647                        duplicates.append(dict(record.items()))
648                        logger.info("duplicate %s" % record.items())
649                elif mode == "edit":
650                    try:
651                        self.modifyRecord(**dict(record.items()))
652                        edited.append(dict(record.items()))
653                        logger.info("edited %s" % record.items())
654                    except KeyError:
655                        not_found.append(dict(record.items()))
656                        logger.info("not found %s" % record.items())
657        invalid = d['invalid_records']
658        for itype in ("imported","edited","not_found","duplicates","invalid"):
659            outlist = locals()[itype]
660            if len(outlist):
661                d = {}
662                for k in outlist[0].keys():
663                    d[k] = k
664                outlist.insert(0,d)
665                outfile = open("file_name_%s" % itype,'w')
666                csv.DictWriter(outfile,outlist[0].keys()).writerows(outlist)
667                logger.info("wrote %(itype)s records to file_name_%(itype)s" % vars())
668###)
669
670    security.declareProtected(ModifyPortalContent,"importCSV")###(
671    def importCSV(self,filename="JAMB_data",
672                  schema_id="application",
673                  layout_id="application_pce",
674                  mode='add'):
675        """ import JAMB data """
676        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
677        stool = getToolByName(self, 'portal_schemas')
678        ltool = getToolByName(self, 'portal_layouts')
679        schema = stool._getOb(schema_id)
680        if schema is None:
681            em = 'No such schema %s' % schema_id
682            logger.error(em)
683            return
684        layout = ltool._getOb(layout_id)
685        if layout is None:
686            em = 'No such layout %s' % layout_id
687            logger.error(em)
688            return
689        d = self._import_old(filename,schema,layout,mode,logger)
690        if len(d['valid_records']) > 0:
691            for record in d['valid_records']:
692                #import pdb;pdb.set_trace()
693                if mode == "add":
694                    self.addRecord(**dict(record.items()))
695                    logger.info("added %s" % record.items())
696                elif mode == "edit":
697                    self.modifyRecord(**dict(record.items()))
698                    logger.info("edited %s" % record.items())
699                else:
700                    logger.info("invalid mode: %s" % mode)
701        logger.info("%(mode)sed %(imported)d records, invalid written to %(not_imported_fn)s" % d)
702    ###)
703
704InitializeClass(ApplicantsCatalog)
705
706###)
707
708class StudentsCatalog(WAeUPTable): ###(
709    security = ClassSecurityInfo()
710
711    meta_type = 'WAeUP Students Catalog'
712    name = "students_catalog"
713    key = "id"
714    affected_types = {   ###(
715                      'StudentApplication':
716                      {'id': 'application',
717                       'fields':
718                       ('jamb_reg_no',
719                        'entry_mode',
720                        #'entry_level',
721                        'entry_session',
722                       )
723                      },
724                      'StudentClearance':
725                      {'id': 'clearance',
726                       'fields':
727                       ('matric_no',
728                        'lga',
729                        'date_of_birth',  # birthday
730                       )
731                      },
732                      'StudentPersonal':
733                      {'id': 'personal',
734                       'fields':
735                       ('name',
736                        'sex',
737                        'perm_address',
738                        'email',
739                        'phone',
740                        'marit_stat',
741                        'firstname',
742                        'middlename',
743                        'lastname',
744                       )
745                      },
746                      'StudentStudyCourse':
747                      {'id': 'study_course',
748                       'fields':
749                       ('course', # study_course
750                        'faculty', # from certificate
751                        'department', # from certificate
752                        'end_level', # from certificate
753                        'level', # current_level
754                        'mode',  # from certificate
755                        'session', # current_session
756                        'verdict', # current_verdict
757                       )
758                      },
759                     }
760    ###)
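    # affected_types maps the portal_type of a student sub-object to the id of that
    # sub-object inside the student folder and to the catalog fields derived from it.
    # reindexIndex and notify_event_listener use it to decide which document to read;
    # a field value is taken from a get_from_doc_<field> helper if one exists,
    # otherwise via plain getattr, e.g. (illustrative):
    #
    #   doc = getattr(student_object, 'study_course').getContent()
    #   faculty = self.get_from_doc_faculty(doc)    # derived from the certificate path
    #   email   = getattr(doc, 'email', None)       # plain attribute lookup, no helper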
761
762    def __init__(self,name=None):
763        if name ==  None:
764            name = self.name
765        WAeUPTable.__init__(self, name)
766        return
767       
768    security.declareProtected(ModifyPortalContent,"exportMoodleData") ###(
769    def exportMoodleData(self):
770        "export a WAeUPTable"
771        member = self.portal_membership.getAuthenticatedMember()
772        logger = logging.getLogger('WAeUPTables.StudentsCatalog.exportMoodleData')
773        if str(member) not in ('admin',):
774            logger.info('%s tries to dump Moodle Data' % (member))
775            return       
776        fields = [field for field in self.schema()]
777        export_fields = ['username','firstname','lastname','email','currentsession','regstate','password','city','country','deleted']
778        format = ','.join(['"%%(%s)s"' % fn for fn in export_fields])
779        csv = []
780        csv.append(','.join(['"%s"' % fn for fn in export_fields]))
781        #import pdb;pdb.set_trace()
782        for uid in self._catalog.uids:
783            records = self.searchResults({"%s" % self.key : uid})
784            if len(records) > 1:
785                # Can not happen, but anyway...
786                raise ValueError("More than one record with uid %s" % uid)
787            if len(records) == 0:
788                raise KeyError("No record for uid %s" % uid)
789            rec = self.record2dict(fields,records[0],None)   
790            fn = rec['firstname'].replace(',',' ') 
791            mn = rec['middlename'].replace(',',' ')
792            ln = rec['lastname'].replace(',',' ')
793            rec['firstname'] = fn + ' ' + mn
794            rec['lastname'] = ln
795            rec['regstate'] = rec['review_state']
796            rec['currentsession'] = rec['session']
797            rec['username'] = rec['id']
798            rec['city'] = ''
799            rec['country'] = 'ng'
800            rec['deleted'] = '0'
801            rec['password'] = self.waeup_tool.getCredential(rec['id'])
802            csv.append(format % rec)
803        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
804        open("%s/export/moodle-%s.csv" % (i_home,current),"w+").write('\n'.join(csv))       
805        return 'ready'
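    # The Moodle export above writes one quoted CSV row per student; header and an
    # illustrative row (placeholder values only):
    #
    #   "username","firstname","lastname","email","currentsession","regstate","password","city","country","deleted"
    #   "<student id>","<firstname middlename>","<lastname>","<email>","<session>","<review_state>","<password>","","ng","0"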
806
807    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None):
808        """ clears the whole enchilada """
809        self._catalog.clear()
810
811        if REQUEST and RESPONSE:
812            RESPONSE.redirect(
813              URL1 +
814              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Cleared')
815
816    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
817        """ clear the catalog, then re-index everything """
818
819        elapse = time.time()
820        c_elapse = time.clock()
821
822        pgthreshold = self._getProgressThreshold()
823        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
824        self.refreshCatalog(clear=1, pghandler=handler)
825
826        elapse = time.time() - elapse
827        c_elapse = time.clock() - c_elapse
828
829        RESPONSE.redirect(
830            URL1 +
831            '/manage_catalogAdvanced?manage_tabs_message=' +
832            urllib.quote('Catalog Updated \n'
833                         'Total time: %s\n'
834                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
835    ###)
836
837    def fill_certificates_dict(self): ###(
838        "return certificate data in  dict"
839        certificates_brains = self.portal_catalog(portal_type ='Certificate')
840        d = {}
841        for cb in certificates_brains:
842            certificate_doc = cb.getObject().getContent()
843            cb_path = cb.getPath().split('/')
844            ld = {}
845            ld['faculty'] = cb_path[-4]
846            ld['department'] = cb_path[-3]
847            ld['end_level'] = getattr(certificate_doc,'end_level','999')
848            ld['study_mode'] = getattr(certificate_doc,'study_mode','')
849            d[cb.getId] = ld
850        return d
851    ###)
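    # fill_certificates_dict returns a mapping keyed by certificate id, with faculty
    # and department taken from the certificate's path, e.g. (illustrative values):
    #
    #   {'<certificate id>': {'faculty': '<faculty id>',
    #                         'department': '<department id>',
    #                         'end_level': '400',
    #                         'study_mode': '<study mode>'}}
    #
    # The callers below cache this mapping on the volatile attribute _v_certificates.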
852
853    def get_from_doc_department(self,doc,cached_data={}): ###(
854        "return the students department"
855        if doc is None:
856            return None
857        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
858            return self._v_certificates[doc.study_course]['department']
859        certificate_res = self.portal_catalog(id = doc.study_course)
860        if len(certificate_res) != 1:
861            return None
862        return certificate_res[0].getPath().split('/')[-3]
863
864    def get_from_doc_faculty(self,doc,cached_data={}):
865        "return the students faculty"
866        if doc is None:
867            return None
868        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
869            return self._v_certificates[doc.study_course]['faculty']
870        certificate_res = self.portal_catalog(id = doc.study_course)
871        if len(certificate_res) != 1:
872            return None
873        return certificate_res[0].getPath().split('/')[-4]
874
875    def get_from_doc_end_level(self,doc,cached_data={}):
876        "return the students end_level"
877        if doc is None:
878            return None
879        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
880            return self._v_certificates[doc.study_course]['end_level']
881        certificate_res = self.portal_catalog(id = doc.study_course)
882        if len(certificate_res) != 1:
883            return None
884        return getattr(certificate_res[0].getObject().getContent(),'end_level','unknown')
885
886    def get_from_doc_level(self,doc,cached_data={}):
887        "return the students level"
888        if doc is None:
889            return None
890        return getattr(doc,'current_level',None)
891
892    #def get_from_doc_mode(self,doc,cached_data={}):
893    #    "return the students mode"
894    #    if doc is None:
895    #        return None
896    #    cm = getattr(doc,'current_mode',None)
897    #    return cm
898   
899    def get_from_doc_mode(self,doc,cached_data={}):
900        "return the students mode"
901        if doc is None:
902            return None
903        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
904            return self._v_certificates[doc.study_course]['study_mode']
905        certificate_res = self.portal_catalog(id = doc.study_course)
906        if len(certificate_res) != 1:
907            return None
908        return getattr(certificate_res[0].getObject().getContent(),'study_mode','unknown')   
909
910
911    def get_from_doc_marit_stat(self,doc,cached_data={}):
912        "return the students marit_stat"
913        if doc is None:
914            return None
915        ms = getattr(doc,'marit_stat',None)
916        if ms == True:
917            return 'married'
918        elif ms == False:
919            return 'single'
920        else:
921            return 'undefined'
922           
923    def get_from_doc_date_of_birth(self,doc,cached_data={}):
924        "return the students date of birth"
925        if doc is None:
926            return None
927        return getattr(doc,'birthday',None)           
928
929    def get_from_doc_session(self,doc,cached_data={}):
930        "return the students current_session"
931        if doc is None:
932            return None
933        return getattr(doc,'current_session',None)
934
935    def get_from_doc_entry_session(self,doc,cached_data={}):
936        "return the students entry_session"
937        if doc is None:
938            return None
939        es = getattr(doc,'entry_session',None)
940        if es is not None and len(es) < 3:
941            return es
942        elif es is not None and len(es) == 9:
943            return es[2:4]   
944        try:
945            digit = int(doc.jamb_reg_no[0])
946        except:
947            return "-1"
948        if digit < 9:
949            return "0%c" % doc.jamb_reg_no[0]
950        return "9%c" % doc.jamb_reg_no[0]
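    # Illustrative examples of the normalisation above: an already short
    # entry_session such as '09' is returned as is, a value like '2009/2010'
    # (length 9) is reduced to '09', and if entry_session is missing the session
    # is guessed from the first digit of the JAMB registration number
    # (a leading '5' yields '05', a leading '9' yields '99').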
951
952    def get_from_doc_course(self,doc,cached_data={}):
953        "return the students study_course"
954        if doc is None:
955            return None
956        return getattr(doc,'study_course',None)
957
958    def get_from_doc_name(self,doc,cached_data={}):
959        "return the students name from the personal"
960        if doc is None:
961            return None
962        return "%s %s %s" % (doc.firstname,doc.middlename,doc.lastname)
963
964    def get_from_doc_verdict(self,doc,cached_data={}):
965        "return the students study_course"
966        if doc is None:
967            return None
968        return getattr(doc,'current_verdict',None)
969    ###)
970
971    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
972        if not hasattr(self,'_v_certificates'):
973            self._v_certificates = self.fill_certificates_dict()
974        if isinstance(name, str):
975            name = (name,)
976        reindextypes = {}
977        reindex_special = []
978        for n in name:
979            if n in ("review_state"):
980                reindex_special.append(n)
981            else:
982                for pt in self.affected_types.keys():
983                    if n in self.affected_types[pt]['fields']:
984                        if reindextypes.has_key(pt):
985                            reindextypes[pt].append(n)
986                        else:
987                            reindextypes[pt]= [n]
988                        break
989        #cached_data = {}
990        #if set(name).intersection(set(('faculty','department','end_level','mode'))):
991        #    cached_data = self.fill_certificates_dict()
992        students = self.portal_catalog(portal_type="Student")
993        if hasattr(self,'portal_catalog_real'):
994            aq_portal = self.portal_catalog_real.evalAdvancedQuery
995        else:
996            aq_portal = self.portal_catalog.evalAdvancedQuery
997        num_objects = len(students)
998        if pghandler:
999            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1000        noattr = set(('StudentClearance','StudentPersonal')) & set(reindextypes.keys())
1001        #import pdb;pdb.set_trace()
1002        for i in xrange(num_objects):
1003            if pghandler: pghandler.report(i)
1004            student_brain = students[i]
1005            student_object = student_brain.getObject()
1006            data = {}
1007            modified = False
1008            sid = data['id'] = student_brain.getId
1009            if reindex_special and 'review_state' in reindex_special:
1010                modified = True
1011                data['review_state'] = student_object.portal_workflow.getInfoFor(student_object,'review_state',None)
1012            sub_objects = False
1013            for pt in reindextypes.keys():
1014                modified = True
1015                try:
1016                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1017                    sub_objects = True
1018                except:
1019                    continue
1020                for field in set(name).intersection(self.affected_types[pt]['fields']):
1021                    if hasattr(self,'get_from_doc_%s' % field):
1022                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc)
1023                    else:
1024                        data[field] = getattr(doc,field)
1025            if not sub_objects and noattr:
1026                import_res = self.returning_import(id = sid)
1027                if not import_res:
1028                    continue
1029                import_record = import_res[0]
1030                data['matric_no'] = import_record.matric_no
1031                data['sex'] = import_record.Sex == 'F'
1032                data['name'] = "%s %s %s" % (import_record.Firstname,
1033                                             import_record.Middlename,
1034                                             import_record.Lastname)
1035                data['jamb_reg_no'] = import_record.Entryregno
1036            if modified:
1037                self.modifyRecord(**data)
1038        if pghandler: pghandler.finish()
1039    ###)
1040
1041    def refreshCatalog(self, clear=0, pghandler=None): ###(
1042        """ re-index everything we can find """
1043        students_folder = self.portal_url.getPortalObject().campus.students
1044        if clear:
1045            self._catalog.clear()
1046        students = self.portal_catalog(portal_type="Student")
1047        num_objects = len(students)
1048        #cached_data = self.fill_certificates_dict()
1049        if not hasattr(self,'_v_certificates'):
1050            self._v_certificates = self.fill_certificates_dict()
1051        if pghandler:
1052            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1053        for i in xrange(num_objects):
1054            if pghandler: pghandler.report(i)
1055            student_brain = students[i]
1056            spath = student_brain.getPath()
1057            student_object = student_brain.getObject()
1058            data = {}
1059            sid = data['id'] = student_brain.getId
1060            #data['review_state'] = student_brain.review_state
1061            data['review_state'] = student_object.portal_workflow.getInfoFor(student_object,'review_state',None)
1062            sub_objects = False
1063            for pt in self.affected_types.keys():
1064                modified = True
1065                try:
1066                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1067                    sub_objects = True
1068                except:
1069                    #from pdb import set_trace;set_trace()
1070                    continue
1071                for field in self.affected_types[pt]['fields']:
1072                    if hasattr(self,'get_from_doc_%s' % field):
1073                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc)
1075                    else:
1076                        data[field] = getattr(doc,field,None)
1077            if not sub_objects:
1078                import_res = self.returning_import(id = sid)
1079                if not import_res:
1080                    continue
1081                import_record = import_res[0]
1082                data['matric_no'] = import_record.matric_no
1083                data['sex'] = import_record.Sex == 'F'
1084                data['name'] = "%s %s %s" % (import_record.Firstname,
1085                                             import_record.Middlename,
1086                                             import_record.Lastname)
1087                data['jamb_reg_no'] = import_record.Entryregno
1088            self.addRecord(**data)
1089        if pghandler: pghandler.finish()
1090    ###)
1091
1092    security.declarePrivate('notify_event_listener') ###(
1093    def notify_event_listener(self,event_type,object,infos):
1094        "listen for events"
1095        if not infos.has_key('rpath'):
1096            return
1097        pt = getattr(object,'portal_type',None)
1098        mt = getattr(object,'meta_type',None)
1099        students_catalog = self
1100        data = {}
1101        if pt == 'Student' and\
1102           mt == 'CPS Proxy Folder' and\
1103           event_type.startswith('workflow'):
1104            data['id'] = object.getId()
1105            data['review_state'] = self.portal_workflow.getInfoFor(object,'review_state',None)
1106            students_catalog.modifyRecord(**data)
1107            return
1108        rpl = infos['rpath'].split('/')
1109        if pt == 'Student' and mt == 'CPS Proxy Folder':
1110            student_id = object.id
1111            if event_type == "sys_add_object":
1112                try:
1113                    self.addRecord(id = student_id)
1114                except ValueError:
1115                    pass
1116                return
1117            elif event_type == 'sys_del_object':
1118                self.deleteRecord(student_id)
1119        if pt not in self.affected_types.keys():
1120            return
1121        if event_type not in ('sys_modify_object',):
1122            return
1123        if mt == 'CPS Proxy Folder':
1124            return
1125        if not hasattr(self,'_v_certificates'):
1126            self._v_certificates = self.fill_certificates_dict()
1127        for field in self.affected_types[pt]['fields']:
1128            if hasattr(self,'get_from_doc_%s' % field):
1129                data[field] = getattr(self,'get_from_doc_%s' % field)(object)
1130            else:
1131                data[field] = getattr(object,field)
1132        data['id'] = rpl[2]
1133        self.modifyRecord(**data)
1134    ###)
1135
1136
1137InitializeClass(StudentsCatalog)
1138
1139###)
1140
1141class CertificatesCatalog(WAeUPTable): ###(
1142    security = ClassSecurityInfo()
1143
1144    meta_type = 'WAeUP Certificates Catalog'
1145    name =  "certificates_catalog"
1146    key = "code"
1147    def __init__(self,name=None):
1148        if name ==  None:
1149            name =  self.name
1150        WAeUPTable.__init__(self, name)
1151
1152    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
1153        """ clear the catalog, then re-index everything """
1154
1155        elapse = time.time()
1156        c_elapse = time.clock()
1157
1158        pgthreshold = self._getProgressThreshold()
1159        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
1160        self.refreshCatalog(clear=1, pghandler=handler)
1161
1162        elapse = time.time() - elapse
1163        c_elapse = time.clock() - c_elapse
1164
1165        RESPONSE.redirect(
1166            URL1 +
1167            '/manage_catalogAdvanced?manage_tabs_message=' +
1168            urllib.quote('Catalog Updated \n'
1169                         'Total time: %s\n'
1170                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
1171    ###)
1172
1173    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1174        if isinstance(name, str):
1175            name = (name,)
1176        certificates = self.portal_catalog(portal_type="Certificate")
1177        num_objects = len(certificates)
1178        if pghandler:
1179            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1180        for i in xrange(num_objects):
1181            if pghandler: pghandler.report(i)
1182            certificate_brain = certificates[i]
1183            certificate_object = certificate_brain.getObject()
1184            pl = certificate_brain.getPath().split('/')
1185            data = {}
1186            cid = data[self.key] = certificate_brain.getId
1187            data['faculty'] = pl[-4]
1188            data['department'] = pl[-3]
1189            doc = certificate_object.getContent()
1190            for field in name:
1191                if field not in (self.key,'faculty','department'):
1192                    data[field] = getattr(doc,field)
1193            self.modifyRecord(**data)
1194        if pghandler: pghandler.finish()
1195    ###)
1196
1197    def refreshCatalog(self, clear=0, pghandler=None): ###(
1198        """ re-index everything we can find """
1199        if clear:
1200            self._catalog.clear()
1201        certificates = self.portal_catalog(portal_type="Certificate")
1202        num_objects = len(certificates)
1203        if pghandler:
1204            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1205        #from pdb import set_trace;set_trace()
1206        for i in xrange(num_objects):
1207            if pghandler: pghandler.report(i)
1208            certificate_brain = certificates[i]
1209            certificate_doc = certificate_brain.getObject().getContent()
1210            pl = certificate_brain.getPath().split('/')
1211            data = {}
1212            for field in self.schema():
1213                data[field] = getattr(certificate_doc,field,None)
1214            data[self.key] = certificate_brain.getId
1215            ai = pl.index('academics')
1216            data['faculty'] = pl[ai +1]
1217            data['department'] = pl[ai +2]
1218            if clear:
1219                self.addRecord(**data)
1220            else:
1221                self.modifyRecord(**data)
1222        if pghandler: pghandler.finish()
1223    ###)
1224
1225    security.declarePrivate('notify_event_listener') ###(
1226    def notify_event_listener(self,event_type,object,infos):
1227        "listen for events"
1228        if not infos.has_key('rpath'):
1229            return
1230        pt = getattr(object,'portal_type',None)
1231        mt = getattr(object,'meta_type',None)
1232        if pt != 'Certificate':
1233            return
1234        data = {}
1235        rpl = infos['rpath'].split('/')
1236        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
1237            return
1238        certificate_id = object.getId()
1239        data[self.key] = certificate_id
1240        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
1241            try:
1242                self.addRecord(**data)
1243            except ValueError:
1244                return
1245            certificate_id = object.getId()
1246            doc = object.getContent()
1247            if doc is None:
1248                return
1249            for field in self.schema():
1250                data[field] = getattr(doc,field,None)
1251            data[self.key] = certificate_id
1252            ai = rpl.index('academics')
1253            data['faculty'] = rpl[ai +1]
1254            data['department'] = rpl[ai +2]
1255            self.modifyRecord(**data)
1256            return
1257        if event_type == "sys_del_object":
1258            self.deleteRecord(certificate_id)
1259            return
1260        if event_type == "sys_modify_object" and mt == 'Certificate':
1261            #from pdb import set_trace;set_trace()
1262            for field in self.schema():
1263                data[field] = getattr(object,field,None)
1264            certificate_id = object.aq_parent.getId()
1265            data[self.key] = certificate_id
1266            ai = rpl.index('academics')
1267            data['faculty'] = rpl[ai +1]
1268            data['department'] = rpl[ai +2]
1269            self.modifyRecord(**data)
1270    ###)
1271
1272
1273InitializeClass(CertificatesCatalog)
1274###)
1275
1276class CoursesCatalog(WAeUPTable): ###(
1277    security = ClassSecurityInfo()
1278
1279    meta_type = 'WAeUP Courses Catalog'
1280    name =  "courses_catalog"
1281    key = "code"
1282    def __init__(self,name=None):
1283        if name ==  None:
1284            name =  self.name
1285        WAeUPTable.__init__(self, name)
1286
1287    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
1288        """ clear the catalog, then re-index everything """
1289
1290        elapse = time.time()
1291        c_elapse = time.clock()
1292
1293        pgthreshold = self._getProgressThreshold()
1294        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
1295        self.refreshCatalog(clear=1, pghandler=handler)
1296
1297        elapse = time.time() - elapse
1298        c_elapse = time.clock() - c_elapse
1299
1300        RESPONSE.redirect(
1301            URL1 +
1302            '/manage_catalogAdvanced?manage_tabs_message=' +
1303            urllib.quote('Catalog Updated \n'
1304                         'Total time: %s\n'
1305                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
1306    ###)
1307
1308    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1309        if isinstance(name, str):
1310            name = (name,)
1311        courses = self.portal_catalog(portal_type="Course")
1312        num_objects = len(courses)
1313        if pghandler:
1314            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1315        for i in xrange(num_objects):
1316            if pghandler: pghandler.report(i)
1317            course_brain = courses[i]
1318            course_object = course_brain.getObject()
1319            pl = course_brain.getPath().split('/')
1320            data = {}
1321            cid = data[self.key] = course_brain.getId
1322            data['faculty'] = pl[-4]
1323            data['department'] = pl[-3]
1324            doc = course_object.getContent()
1325            for field in name:
1326                if field not in (self.key,'faculty','department'):
1327                    data[field] = getattr(doc,field)
1328            self.modifyRecord(**data)
1329        if pghandler: pghandler.finish()
1330    ###)
1331
1332    def refreshCatalog(self, clear=0, pghandler=None): ###(
1333        """ re-index everything we can find """
1334        if clear:
1335            self._catalog.clear()
1336        courses = self.portal_catalog(portal_type="Course")
1337        num_objects = len(courses)
1338        if pghandler:
1339            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1340        #from pdb import set_trace;set_trace()
1341        for i in xrange(num_objects):
1342            if pghandler: pghandler.report(i)
1343            course_brain = courses[i]
1344            course_doc = course_brain.getObject().getContent()
1345            pl = course_brain.getPath().split('/')
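            # faculty and department are the two path segments that directly
            # follow 'academics' in the course's physical path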
1346            data = {}
1347            for field in self.schema():
1348                data[field] = getattr(course_doc,field,None)
1349            data[self.key] = course_brain.getId
1350            ai = pl.index('academics')
1351            data['faculty'] = pl[ai +1]
1352            data['department'] = pl[ai +2]
1353            if clear:
1354                self.addRecord(**data)
1355            else:
1356                self.modifyRecord(**data)
1357        if pghandler: pghandler.finish()
1358    ###)
1359
1360    security.declarePrivate('notify_event_listener') ###(
1361    def notify_event_listener(self,event_type,object,infos):
1362        "listen for events"
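        # Keeps courses_catalog in sync with CPS events: a newly added proxy
        # folder creates a record and fills it from the content document (if
        # already available), deletion removes the record, and modification
        # of the Course document rewrites all schema fields plus
        # faculty/department derived from the rpath.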
1363        if not infos.has_key('rpath'):
1364            return
1365        pt = getattr(object,'portal_type',None)
1366        mt = getattr(object,'meta_type',None)
1367        if pt != 'Course':
1368            return
1369        data = {}
1370        rpl = infos['rpath'].split('/')
1371        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
1372            return
1373        course_id = object.getId()
1374        data[self.key] = course_id
1375        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
1376            try:
1377                self.addRecord(**data)
1378            except ValueError:
1379                return
1380            course_id = object.getId()
1381            doc = object.getContent()
1382            if doc is None:
1383                return
1384            for field in self.schema():
1385                data[field] = getattr(doc,field,None)
1386            data[self.key] = course_id
1387            ai = rpl.index('academics')
1388            data['faculty'] = rpl[ai +1]
1389            data['department'] = rpl[ai +2]
1390            self.modifyRecord(**data)
1391            return
1392        if event_type == "sys_del_object":
1393            self.deleteRecord(course_id)
1394            return
1395        if event_type == "sys_modify_object" and mt == 'Course':
1396            #from pdb import set_trace;set_trace()
1397            for field in self.schema():
1398                data[field] = getattr(object,field,None)
1399            course_id = object.aq_parent.getId()
1400            data[self.key] = course_id
1401            ai = rpl.index('academics')
1402            data['faculty'] = rpl[ai +1]
1403            data['department'] = rpl[ai +2]
1404            self.modifyRecord(**data)
1405    ###)
1406
1407
1408InitializeClass(CoursesCatalog)
1409###)
1410
1411class CourseResults(WAeUPTable): ###(
1412    security = ClassSecurityInfo()
1413
1414    meta_type = 'WAeUP Results Catalog'
1415    name = "course_results"
1416    key = "key" # composite key: student_id|level_id|course_id
1417    def __init__(self,name=None):
1418        if name ==  None:
1419            name = self.name
1420        WAeUPTable.__init__(self, name)
1421        self._queue = []
1422
1423    def addMultipleRecords(self, records): ###(
1424        """add many records"""
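        # The uid is the composite key student_id|level_id|course_id; records
        # whose uid already exists with identical ids are skipped and their
        # uids are returned to the caller. Minimal usage sketch (field names
        # assumed; any additional schema fields may be passed along):
        #
        #   records = [{'student_id': 'A123456', 'level_id': '100',
        #               'course_id': 'MTH101'}]
        #   skipped = context.course_results.addMultipleRecords(records)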
1425        existing_uids = []
1426        for data in records:
1427            uid = "%(student_id)s|%(level_id)s|%(course_id)s" % data
1428            data[self.key] = uid
1429            query = Eq(self.key, uid)
1430            res = self.course_results.evalAdvancedQuery(query)
1431            if len(res) > 0:
1432                rec = res[0]
1433                equal = True
1434                for attr in ('student_id','level_id','course_id'):
1435                    if getattr(rec,attr,'') != data[attr]:
1436                        equal = False
1437                        break
1438                if equal:
1439                    existing_uids.append(uid)
1440                    continue
1441            self.catalog_object(dict2ob(data), uid=uid)
1442        return existing_uids
1443    ###)
1444
1445    def deleteResultsHere(self,level_id,student_id): ###(
1446        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1447        course_results = self.course_results.evalAdvancedQuery(query)
1448        #import pdb;pdb.set_trace()
1449        for result in course_results:
1450            self.deleteRecord(result.key)
1451    ###)
1452
1453    def moveResultsHere(self,level,student_id): ###(
1454        #import pdb;pdb.set_trace()
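        # Moves course results stored as objects inside the level folder into
        # this catalog: each object is cataloged under the composite key
        # student_id|level_id|course_id (a '_co' suffix on the object id marks
        # a carry-over), objects whose code is already cataloged are skipped,
        # and finally all objects are deleted from the level folder.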
1455        level_id = level.getId()
1456        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1457        course_results = self.course_results.evalAdvancedQuery(query)
1458        existing_courses = [cr.code for cr in course_results]
1459        to_delete = []
1460        for code,obj in level.objectItems():
1461            to_delete.append(code)
1462            carry_over = False
1463            if code.endswith('_co'):
1464                carry_over = True
1465                code  = code[:-3]
1466            if code in existing_courses:
1467                continue
1468            course_result_doc = obj.getContent()
1469            data = {}
1470            course_id = code
1471            for field in self.schema():
1472                data[field] = getattr(course_result_doc,field,'')
1473            data['key'] = key = "%(student_id)s|%(level_id)s|%(course_id)s" % vars()
1474            data['student_id'] = student_id
1475            data['level_id'] = level_id
1476            session_id = self.getLevelSession(level.getContent(),student_id,level_id)
1477            data['session_id'] = session_id
1478            #data['queue_status'] = OBJECT_CREATED
1479            data['code'] = course_id
1480            data['carry_over'] = carry_over
1481            self.catalog_object(dict2ob(data), uid=key)
1482        level.manage_delObjects(to_delete)
1483    ###)
1484
1485    def getCourses(self,student_id,level_id): ###(
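        # Returns (total_credits, gpa, carry_overs, normal1, normal2, normal3):
        # normal1/normal2 hold first/second semester courses, normal3 the
        # rest, and gpa is the credit-weighted sum of grade weights (not yet
        # divided by total_credits here).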
1486        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1487        course_results = self.course_results.evalAdvancedQuery(query)
1488        carry_overs = []
1489        normal1 = []
1490        normal2 = []
1491        normal3 = []
1492        total_credits = 0
1493        gpa = 0
1494        for brain in course_results:
1495            d = {}
1496
1497            for field in self.schema():
1498                d[field] = getattr(brain,field,None)
1499                if repr(d[field]) == 'Missing.Value':
1500                    d[field] = ''
1501            d['weight'] = ''
1502            d['grade'] = ''
1503            d['score'] = ''
1504
1505            if str(brain.credits).isdigit():
1506                credits = int(brain.credits)
1507                total_credits += credits
1508                score = getattr(brain,'score',0)
1509                if score and str(score).isdigit() and int(score) > 0:
1510                    score = int(score)
1511                    grade,weight = self.getGradesFromScore(score,'')
1512                    gpa += weight * credits
1513                    d['weight'] = weight
1514                    d['grade'] = grade
1515                    d['score'] = score
1516
1517            #if str(brain.ca1).isdigit() and str(brain.ca2).isdigit() and str(brain.exam).isdigit():
1518            #    d['score_calc'] = int(brain.ca1) + int(brain.ca2) + int(brain.exam)
1519            #else:
1520            #    d['score_calc'] = ''
1521            try:
1522                d['score_calc'] = float(brain.ca1) + float(brain.ca2) + float(brain.exam)
1523            except:
1524                d['score_calc'] = ''
1525
1526            if d['score_calc']:
1527                grade,weight = self.getGradesFromScore(d['score_calc'],level_id) # returns (grade, weight)
1528                d['grade'] = grade
1529
1530            d['coe'] = ''
1531            if brain.core_or_elective:
1532                d['coe'] = 'Core'
1533            elif brain.core_or_elective == False:
1534                d['coe'] = 'Elective'
1535            code = d['id'] = brain.code
1536            d['code'] = code
1537            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1538            if res:
1539                course = res[0]
1540                d['title'] = course.title
1541                # The courses_catalog contains strings and integers in its semester field.
1542                # Maybe this can be fixed by reindexing the catalog. The schema of course says 'CPS Int Field'.
1543                d['semester'] = str(course.semester)
1544            else:
1545                d['title'] = "Course has been removed from course list"
1546                d['semester'] = ''
1547            if brain.carry_over:
1548                d['coe'] = 'CO'
1549                carry_overs.append(d)
1550            else:
1551                if d['semester'] == '1':
1552                    normal1.append(d)
1553
1554                elif d['semester'] == '2':
1555                    normal2.append(d)
1556                else:
1557                    normal3.append(d)
1558        #normal.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
1559        #                                "%(semester)s%(code)s" % y))
1560        carry_overs.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
1561                                             "%(semester)s%(code)s" % y))
1562        return total_credits,gpa,carry_overs,normal1,normal2,normal3
1563    ###)
1564
1565   
1566    # for transcript only
1567    def getAllCourses(self,student_id): ###(
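        # Like getCourses, but across all levels of the student and without
        # credit/GPA totals; used by the transcript (see comment above).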
1568        query = Eq('student_id',student_id)
1569        course_results = self.course_results.evalAdvancedQuery(query)
1570        courses = []
1571        for brain in course_results:
1572            d = {}
1573
1574            for field in self.schema():
1575                d[field] = getattr(brain,field,'')
1576
1577            d['weight'] = ''
1578            d['grade'] = ''
1579            d['score'] = ''
1580
1581            if str(brain.credits).isdigit():
1582                credits = int(brain.credits)
1583                score = getattr(brain,'score',0)
1584                if score and str(score).isdigit() and int(score) > 0:
1585                    score = int(score)
1586                    grade,weight = self.getGradesFromScore(score,'')
1587                    d['weight'] = weight
1588                    d['grade'] = grade
1589                    d['score'] = score
1590            d['coe'] = ''
1591            if brain.core_or_elective:
1592                d['coe'] = 'Core'
1593            elif brain.core_or_elective == False:
1594                d['coe'] = 'Elective'
1595            code = d['id'] = brain.code
1596            d['code'] = code
1597            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1598            if res:
1599                course = res[0]
1600                d['title'] = course.title
1601                # The courses_catalog contains strings and integers in its semester field.
1602                # Maybe this can be fixed by reindexing the catalog. The schema of course says 'CPS Int Field'.
1603                d['semester'] = str(course.semester)
1604            else:
1605                d['title'] = "Course has been removed from course list"
1606                d['semester'] = ''
1607            if brain.carry_over:
1608                d['coe'] = 'CO'
1609            courses.append(d)
1610        return courses
1611    ###)
1612   
1613    def getYearGroupAverage(self,session_id,level_id): ###(
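        # Year group averages per semester: om is the overall mark
        # ca1 + ca2 + exam of each result; yga1-3 are the per-semester
        # averages (formatted as '%.2f' strings when results exist),
        # counter1-3 the number of results, yg1-3 the raw mark lists.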
1614        query = Eq('session_id',session_id) & Eq('level_id',level_id)
1615        course_results = self.course_results.evalAdvancedQuery(query)
1616        yga1 = 0
1617        yg1 = []
1618        counter1 = 0
1619        yga2 = 0
1620        yg2 = []
1621        counter2 = 0
1622        yga3 = 0
1623        yg3 = []
1624        counter3 = 0       
1625        #import pdb;pdb.set_trace()
1626        for brain in course_results:
1627            try:
1628                om = float(brain.ca1) + float(brain.ca2) + float(brain.exam)
1629                if not om > 0:
1630                    continue
1631                code = brain.code               
1632                res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1633                if res:
1634                    course = res[0]
1635                    # The courses_catalog contains strings and integers in its semester field.
1636                    # Maybe this can be fixed by reindexing the catalog. The schema of course says 'CPS Int Field'.
1637                    semester = str(course.semester)
1638                else:
1639                    semester = ''
1640                if semester == '1':
1641                    counter1 += 1
1642                    yga1 += om
1643                    yg1.append(om)
1644                elif semester == '2':
1645                    counter2 += 1
1646                    yga2 += om     
1647                    yg2.append(om)   
1648                elif semester == '3':
1649                    counter3 += 1
1650                    yga3 += om
1651                    yg3.append(om)
1652            except:
1653                continue               
1654        if counter1:
1655            yga1 /= counter1
1656            yga1 = '%.2f' % yga1   
1657        if counter2:
1658            yga2 /= counter2
1659            yga2 = '%.2f' % yga2   
1660        if counter3:
1661            yga3 /= counter3
1662            yga3 = '%.2f' % yga3                                   
1663        return yga1, yga2, yga3, counter1, counter2, counter3, yg1, yg2, yg3
1664    ###)
1665   
1666   
1667    #security.declarePublic("calculateCoursePosition")
1668    def calculateCoursePosition(self,session_id,level_id,code,score,semester=None):
1669        #"""calculate Course Position"""
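        # Ranks the given score within all results of the same session, level
        # and course (optionally restricted to one semester): the overall
        # marks are sorted in descending order and the position of the first
        # mark <= score is returned as e.g. '3 of 27' plus the full mark list.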
1670        query = Eq('session_id',session_id) & Eq('level_id',level_id) & Eq('code',code)
1671        course_results = self.course_results.evalAdvancedQuery(query)
1672        ygc = []
1673        #import pdb;pdb.set_trace() 
1674        for brain in course_results:
1675            try:
1676                if not float(brain.ca1) + float(brain.ca2) + float(brain.exam) > 0:
1677                    continue
1678                #code = brain.code   
1679                if semester:
1680                    res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1681                    if res:
1682                        course = res[0]
1683                        # The courses_catalog contains strings and integers in its semester field.
1684                        # Maybe this can be fixed by reindexing the catalog. The schema of course says 'CPS Int Field'.
1685                        semester_from_course = str(course.semester)
1686                    else:
1687                        continue
1688                    if semester != semester_from_course:
1689                        continue
1690                ygc.append(float(brain.ca1) + float(brain.ca2) + float(brain.exam))
1691            except:
1692                continue     
1693        ygc.sort(reverse=True)
1694        if not len(ygc):
1695            return 'no result'
1696        #import pdb;pdb.set_trace()       
1697        for pos in range(len(ygc)):
1698            if ygc[pos] <= float(score):
1699                break
1700        output = {}   
1701        output['pos'] =  '%d of %d' % (pos+1,len(ygc))
1702        output['ygc'] = ygc
1703        return output
1704       
1705    security.declareProtected(ModifyPortalContent,"calculateAllCoursePositions")
1706    def calculateAllCoursePositions(self,session_id=None):
1707        """calculate All Course Positions"""
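        # Writes the computed position into the 'pic' field of every course
        # result of the given session; results without a positive overall
        # mark, or raising an error during calculation, get an empty 'pic'.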
1708        logger = logging.getLogger('WAeUPTables.CourseResults.calculateAllCoursePositions')
1709        member = self.portal_membership.getAuthenticatedMember()
1710        logger.info('%s starts recalculation of positions in session %s' % (member,session_id))
1711        if session_id:
1712            query = Eq('session_id',session_id)
1713        else:
1714            return 'no session_id provided'
1715        course_results = self.course_results.evalAdvancedQuery(query)
1716        for brain in course_results:
1717            try:
1718                if not float(brain.ca1) + float(brain.ca2) + float(brain.exam) > 0:
1719                    data = {}
1720                    data[self.key] = brain.key
1721                    data['pic'] = ''
1722                    self.modifyRecord(**data)                   
1723                    continue
1724                res = self.courses_catalog.evalAdvancedQuery(Eq('code',brain.code))
1725                if res:
1726                    course = res[0]
1727                    semester_from_course = str(course.semester)
1728                else:
1729                    continue                   
1730                score = float(brain.ca1) + float(brain.ca2) + float(brain.exam)
1731                pic = self.calculateCoursePosition(session_id,brain.level_id,brain.code,score,semester_from_course)['pos']
1732                data = {}
1733                data[self.key] = brain.key
1734                data['pic'] = pic
1735                self.modifyRecord(**data)
1736            except:
1737                data = {}
1738                data[self.key] = brain.key
1739                data['pic'] = ''
1740                self.modifyRecord(**data)
1741                continue       
1742        logger.info('recalculation finished')             
1743        return 'ready'   
1744   
1745    def exportRemoveAllCourses(self,student_id,export=False,remove=False): ###(
1746        ""
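        # Collects all course_results rows of the student as CSV lines; with
        # export=True they are appended to
        # <INSTANCE_HOME>/export/course_results_removed.csv (a headline is
        # written when the file is new), with remove=True the catalog records
        # are deleted. The formatted lines are returned either way.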
1747        query = Eq('student_id',student_id)
1748        cr_catalog = self.course_results
1749        course_results = cr_catalog.evalAdvancedQuery(query)
1750        courses = []
1751        fields = self.schema()
1752        format = '"%(' + ')s","%('.join(fields) + ')s"'
1753        for brain in course_results:
1754            d = {}
1755            for field in fields:
1756                d[field] = getattr(brain,field,'')
1757            courses.append(format % d)
1758               
1759        if export:
1760            export_file = "%s/export/course_results_removed.csv" % (i_home)
1761            if not os.path.exists(export_file): 
1762                file_handler = open(export_file,"a")
1763                headline = ','.join(fields)
1764                file_handler.write(headline +'\n')
1765            else:
1766                file_handler = open(export_file,"a")
1767            for line in courses:
1768                file_handler.write(line +'\n')
            file_handler.close()
1769
1770        if remove:
1771            for brain in course_results:
1772                key = getattr(brain,'key','')
1773                cr_catalog.deleteRecord(key)
1774       
1775        return courses
1776    ###)   
1777   
1778   
1779
1780InitializeClass(CourseResults)
1781###)
1782
1783class OnlinePaymentsImport(WAeUPTable): ###(
1784
1785    meta_type = 'WAeUP Online Payment Transactions'
1786    name = "online_payments_import"
1787    key = "order_id"
1788    def __init__(self,name=None):
1789        if name ==  None:
1790            name = self.name
1791        WAeUPTable.__init__(self, name)
1792
1793
1794InitializeClass(OnlinePaymentsImport)
1795###)
1796
1797class ReturningImport(WAeUPTable): ###(
1798
1799    meta_type = 'Returning Import Table'
1800    name = "returning_import"
1801    key = "matric_no"
1802    def __init__(self,name=None):
1803        if name ==  None:
1804            name = self.name
1805        WAeUPTable.__init__(self, name)
1806
1807
1808InitializeClass(ReturningImport)
1809###)
1810
1811class ResultsImport(WAeUPTable): ###(
1812
1813    meta_type = 'Results Import Table'
1814    name = "results_import"
1815    key = "key"
1816    def __init__(self,name=None):
1817        if name ==  None:
1818            name = self.name
1819        WAeUPTable.__init__(self, name)
1820
1821
1822InitializeClass(ResultsImport)
1823
1824###)
1825
1826class PaymentsCatalog(WAeUPTable): ###(
1827    security = ClassSecurityInfo()
1828
1829    meta_type = 'WAeUP Payments Catalog'
1830    name = "payments_catalog"
1831    key = "order_id"
1832    def __init__(self,name=None):
1833        if name ==  None:
1834            name = self.name
1835        WAeUPTable.__init__(self, name)
1836
1837
1838    security.declarePrivate('notify_event_listener') ###(
1839    def notify_event_listener(self,event_type,object,infos):
1840        "listen for events"
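        # Deleting the proxy folder removes the record by order_id; all other
        # proxy folder events are ignored. Modification of the real Payment
        # object rewrites the record (student_id is taken from the rpath) and
        # falls back to addRecord if no record exists yet.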
1841        if not infos.has_key('rpath'):
1842            return
1843        pt = getattr(object,'portal_type',None)
1844        mt = getattr(object,'meta_type',None)
1845        data = {}
1846        if pt != 'Payment':
1847            return
1848        if event_type == 'sys_del_object' and mt == 'CPS Proxy Folder':
1849            self.deleteRecord(object.getContent().order_id)
1850        if mt == 'CPS Proxy Folder':
1851            return # is handled only for the real object
1852        if event_type not in ('sys_modify_object',):
1853            return
1854        for field in self.schema():
1855            data[field] = getattr(object,field,'')
1856        rpl = infos['rpath'].split('/')
1857        #import pdb;pdb.set_trace()
1858        student_id = rpl[-4]
1859        data['student_id'] = student_id
1860        modified = False
1861        try:
1862            self.modifyRecord(**data)
1863            modified = True
1864        except KeyError:
1865            #logger = logging.getLogger('WAeUPTables.PaymentsCatalog.%s' % self.__name__)
1866            #logger.info("could not modify entry for %(student_id)s with %(order_id)s" % data)
1867            pass
1868        if not modified:
1869            try:
1870                self.addRecord(**data)
1871            except:
1872                logger = logging.getLogger('WAeUPTables.PaymentsCatalog.notify_event_listener')
1873                logger.info("could not add or modify entry for %(student_id)s with %(order_id)s" % data)
1874        ###)
1875
1876
1877    def exportRemoveAllPayments(self,student_id,export=False,remove=False): ###(
1878        ""
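        # Same pattern as CourseResults.exportRemoveAllCourses, but for the
        # student's payment records and
        # <INSTANCE_HOME>/export/payments_removed.csv.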
1879        query = Eq('student_id',student_id)
1880        pm_catalog = self.payments_catalog
1881        payments = pm_catalog.evalAdvancedQuery(query)
1882        payments_dic = []
1883        fields = self.schema()
1884        format = '"%(' + ')s","%('.join(fields) + ')s"'
1885        for brain in payments:
1886            d = {}
1887            for field in fields:
1888                d[field] = getattr(brain,field,'')
1889            payments_dic.append(format % d)
1890               
1891        if export:
1892            export_file = "%s/export/payments_removed.csv" % (i_home)
1893            if not os.path.exists(export_file): 
1894                file_handler = open(export_file,"a")
1895                headline = ','.join(fields)
1896                file_handler.write(headline +'\n')
1897            else:
1898                file_handler = open(export_file,"a")
1899            for line in payments_dic:
1900                file_handler.write(line +'\n')
            file_handler.close()
1901
1902        if remove:
1903            for brain in payments:
1904                order_id = getattr(brain,'order_id','')
1905                pm_catalog.deleteRecord(order_id)
1906       
1907        return payments_dic
1908    ###)   
1909
1910    security.declareProtected(ModifyPortalContent,"dumpPayments")###(
1911    def dumpPayments(self,session_id=''):
1912        """dump all valid payments and combine with student data """
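        # Exports all 'paid' online payments of a session, each joined with
        # the matching students_catalog record, to a timestamped CSV file
        # under <INSTANCE_HOME>/export/.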
1913       
1914        if not session_id:
1915            session_id  = self.getSessionId()[0]
1916        member = self.portal_membership.getAuthenticatedMember()
1917        logger = logging.getLogger('WAeUPTables.PaymentsCatalog.dumpPayments')
1918        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
1919        export_file = "%s/export/valid_payments%s_%s.csv" % (i_home,session_id,current,)       
1920       
1921        pm_catalog = self.payments_catalog
1922        query = Eq('status','paid') & Eq('type','online') & Eq('session_id',session_id)
1923        payments = pm_catalog.evalAdvancedQuery(query)
1924        payments_dic = []
1925        s_catalog = self.students_catalog
1926        fields_pm = pm_catalog.schema()
1927        fields_s = s_catalog.schema()
1928        fields =  fields_pm + fields_s
1929       
1930        format = '"%(' + ')s","%('.join(fields) + ')s"'
1931        #import pdb;pdb.set_trace()
1932        for brain in payments:
1933            d = {}
1934            for field in fields_pm:
1935                d[field] = getattr(brain,field,'')
1936           
1937            student_id = getattr(brain,'student_id','')
1938            query = Eq('id',student_id)   
1939            student = s_catalog.evalAdvancedQuery(query)
1940            if student:
1941                for field in fields_s:
1942                    d[field] = getattr(student[0],field,'')               
1943            payments_dic.append(format % d)     
1944           
1945        if not os.path.exists(export_file): 
1946            file_handler = open(export_file,"a")
1947            headline = ','.join(fields)
1948            file_handler.write(headline +'\n')
1949        else:
1950            file_handler = open(export_file,"a")
1951        for line in payments_dic:
1952            file_handler.write(line +'\n')
        file_handler.close()
1953       
1954        return 'ready'       
1955           
1956
1957InitializeClass(PaymentsCatalog)
1958
1959###)
1960
1961class RemovedStudentIds(WAeUPTable): ###(
1962
1963    meta_type = 'WAeUP Removed StudentIds'
1964    name = "removed_student_ids"
1965    key = "id"
1966    def __init__(self,name=None):
1967        if name ==  None:
1968            name = self.name
1969        WAeUPTable.__init__(self, name)
1970
1971
1972InitializeClass(RemovedStudentIds)
1973
1974###)
1975
1976# BBB: backward-compatibility alias for the old misspelled class name
1977AccomodationTable = AccommodationTable