From Students.py: security.declareProtected(ModifyPortalContent,"loadFullTimeStudentsFromCSV")###( def loadFullTimeStudentsFromCSV(self): """load Fulltime Studentdata from CSV values""" import transaction import random tr_count = 0 name = 'short_full_time' no_import = False if not no_import: no_import = open("%s/import/%s_not_imported.csv" % (i_home,name),"w") no_import.write('"MatricNo","EntryRegNo","CurrentSession","StudentLevel","fullname","FirstName","MiddleName","Lastname","FormerSurname","Sex","Nationality","State","LGA","PermanentAddress","PermanentAddressCity","CampusAddress","PhoneNumber","Emailaddress","Mode","CourseMajor","Faculty","Dept"\n') logger = logging.getLogger('Import.%s' % name) logger.info('Start loading from %s.csv' % name) pwlist = [] pwlist.append('"student_id","firstname","middlename","lastname","matric_no","jamb_reg_no","access_code"') pwl_template = Template('"$student_id","$firstname","$middlename","$lastname","$matric_no","$jamb_reg_no","$access_code"') students_folder = self.portal_catalog({'meta_type': 'StudentsFolder'})[-1].getObject() try: students = csv.DictReader(open("%s/import/%s.csv" % (i_home,name),"rb")) except: logger.error('Error reading %s.csv' % name) return l = self.portal_catalog({'meta_type': "StudentClearance",}) matrics = [] for s in l: matrics.append(s.getObject().getContent().matric_no) print matrics l = self.portal_catalog({'meta_type': "Certificate"}) certs = {} for c in l: ca,ac,fa,dep_id,co,certcode = c.relative_path.split('/') cid = "%(dep_id)s_%(certcode)s" % vars() certs[cid] = c.getObject() for student in students: logger.info('processing "%(MatricNo)s","%(EntryRegNo)s","%(CurrentSession)s","%(StudentLevel)s","%(fullname)s","%(FirstName)s","%(MiddleName)s","%(Lastname)s","%(FormerSurname)s","%(Sex)s","%(Nationality)s","%(State)s","%(LGA)s","%(PermanentAddress)s","%(PermanentAddressCity)s","%(CampusAddress)s","%(PhoneNumber)s","%(Emailaddress)s","%(Mode)s","%(CourseMajor)s","%(Faculty)s","%(Dept)s"\n' % student) sid = student.get('MatricNo') if sid == "": em = 'Empty MatricNo\n' logger.info(em) no_import.write(em) no_import.write('"%(MatricNo)s","%(EntryRegNo)s","%(CurrentSession)s","%(StudentLevel)s","%(fullname)s","%(FirstName)s","%(MiddleName)s","%(Lastname)s","%(FormerSurname)s","%(Sex)s","%(Nationality)s","%(State)s","%(LGA)s","%(PermanentAddress)s","%(PermanentAddressCity)s","%(CampusAddress)s","%(PhoneNumber)s","%(Emailaddress)s","%(Mode)s","%(CourseMajor)s","%(Faculty)s","%(Dept)s"\n' % student) continue certcode = makeCertificateCode(student.get('CourseMajor')) dep_id = student.get('Dept') fac_id = student.get('Faculty') cid = "%(dep_id)s_%(certcode)s" % vars() if cid not in certs.keys(): em = 'Certificate with ID %s %s not found\n' % (certcode, student.get('CourseMajor')) logger.info(em) no_import.write(em) no_import.write('"%(MatricNo)s","%(EntryRegNo)s","%(CurrentSession)s","%(StudentLevel)s","%(fullname)s","%(FirstName)s","%(MiddleName)s","%(Lastname)s","%(FormerSurname)s","%(Sex)s","%(Nationality)s","%(State)s","%(LGA)s","%(PermanentAddress)s","%(PermanentAddressCity)s","%(CampusAddress)s","%(PhoneNumber)s","%(Emailaddress)s","%(Mode)s","%(CourseMajor)s","%(Faculty)s","%(Dept)s"\n' % student) continue certificate_doc = certs[cid].getContent() level = student.get('StudentLevel') try: int(level) except: em = 'Student with ID %(MatricNo)s StudentLevel is empty\n' % student logger.info(em) no_import.write(em) 
no_import.write('"%(MatricNo)s","%(EntryRegNo)s","%(CurrentSession)s","%(StudentLevel)s","%(fullname)s","%(FirstName)s","%(MiddleName)s","%(Lastname)s","%(FormerSurname)s","%(Sex)s","%(Nationality)s","%(State)s","%(LGA)s","%(PermanentAddress)s","%(PermanentAddressCity)s","%(CampusAddress)s","%(PhoneNumber)s","%(Emailaddress)s","%(Mode)s","%(CourseMajor)s","%(Faculty)s","%(Dept)s"\n' % student) continue matric_no = student.get('MatricNo') if matric_no not in matrics: matrics.append(matric_no) sid = self.generateStudentId(student.get('Lastname')[0]) #self.log('Creating Faculty %(id)s = %(Title)s' % faculty) students_folder.invokeFactory('Student', sid) logger.info('%(tr_count)s: Creating Student with ID %(sid)s Matric_no %(matric_no)s ' % vars()) student_obj = getattr(self,sid) access_code = "%d" % random.randint(1000000000,9999999999) student_obj.getContent().makeStudentMember(sid,access_code,) pwl_dict = {'student_id': sid,'access_code':access_code} student_obj.invokeFactory('StudentApplication','application') application = student_obj.application da = {'Title': 'Application Data'} student_obj.invokeFactory('StudentPersonal','personal') da['jamb_reg_no'] = student.get('EntryRegNo') personal = student_obj.personal dp = {'Title': 'Personal Data'} student_obj.invokeFactory('StudentClearance','clearance') clearance = student_obj.clearance dc = {'Title': 'Clearance/Eligibility Record'} dc['matric_no'] = matric_no state = student.get('State') lga = student.get('LGA') if state and lga: lga = state + ' / ' + lga else: lga = "None" dc['lga'] = lga dc['nationality'] = student.get('Nationality') dc['email'] = student.get('Emailaddress') dp['firstname'] = student.get('FirstName') dp['middlename'] = student.get('MiddleName') dp['lastname'] = student.get('Lastname') dp['former_surname'] = student.get('FormerSurname') dp['sex'] = student.get('Sex') == 'F' dp['perm_address'] = student.get('PermanentAddress') dp['perm_city'] = student.get('PermanentAddressCity') dp['campus_address'] = student.get('CampusAddress') dp['phone'] = student.get('PhoneNumber') application.getContent().edit(mapping=da) personal.getContent().edit(mapping=dp) clearance.getContent().edit(mapping=dc) # # Study Course # student_obj.invokeFactory('StudentStudyCourse','study_course') studycourse = student_obj.study_course dsc = {} from_certificate = ['title', 'max_elect', 'max_pass', 'n_core', 'nr_years', 'probation_credits', 'promotion_credits', 'start_level', ] for f in from_certificate: dsc[f] = getattr(certificate_doc,f) dsc['faculty'] = fac_id dsc['department'] = dep_id dsc['study_course'] = certcode css = student.get('CurrentSession') or '2004-2005' cs = int(css.split('-')[0]) - 2000 cl = int(student.get('StudentLevel') or '100')/100 dsc['entry_session'] = "200%s" % (cs - cl) dsc['clr_ac_pin'] = access_code studycourse.getContent().edit(mapping=dsc) # # Level # ## l = getattr(studycourse,level,None) ## if 0 and l is None: ## #self.log('Creating Department %(DeptCode)s = %(Description)s' % dep) ## logger.info('Creating Level %(StudentLevel)s for %(fullname)s' % student) ## studycourse.invokeFactory('StudentStudyLevel', level) ## l = getattr(studycourse, level) ## certificate = certs[certcode] ## cert_level = getattr(certificate,level,None) ## if cert_level is None: ## logger.info('Level %(level)s not in %(certcode)s' % vars()) ## l.getContent().edit(mapping={'Title': "Level %s" % level}) else: em = 'Student with ID %(MatricNo)s %(fullname)s already exists\n' % student logger.info(em) no_import.write(em) 
no_import.write('"%(MatricNo)s","%(EntryRegNo)s","%(CurrentSession)s","%(StudentLevel)s","%(fullname)s","%(FirstName)s","%(MiddleName)s","%(Lastname)s","%(FormerSurname)s","%(Sex)s","%(Nationality)s","%(State)s","%(LGA)s","%(PermanentAddress)s","%(PermanentAddressCity)s","%(CampusAddress)s","%(PhoneNumber)s","%(Emailaddress)s","%(Mode)s","%(CourseMajor)s","%(Faculty)s","%(Dept)s"\n' % student) continue if tr_count > MAX_TRANS: transaction.commit() em = 'Transaction commited\n' % student logger.info(em) tr_count = 0 tr_count += 1 pwl_dict.update(dc) pwl_dict.update(da) pwl_dict.update(dp) wftool = self.portal_workflow pwlist.append(pwl_template.substitute(pwl_dict)) wftool.doActionFor(student_obj,'clear_and_validate') student_obj.manage_setLocalRoles(sid, ['Owner',]) wftool.doActionFor(application,'close') application.manage_setLocalRoles(sid, ['Owner',]) wftool.doActionFor(clearance,'close') clearance.manage_setLocalRoles(sid, ['Owner',]) wftool.doActionFor(personal,'close') personal.manage_setLocalRoles(sid, ['Owner',]) wftool.doActionFor(studycourse,'close_for_edit') studycourse.manage_setLocalRoles(sid, ['Owner',]) open("%s/import/pwlist-%s.csv" % (i_home,name),"w+").write('\n'.join(pwlist)) return self.REQUEST.RESPONSE.redirect("%s" % self.REQUEST.get('URL1')) ###) security.declareProtected(ModifyPortalContent,"loadPumeResultsFromCSV")###( def loadPumeResultsFromCSV(self): """load Fulltime Studentdata from CSV values into pumeresults catalog""" import transaction import random ## csv_d = {'jamb_reg_no': "RegNumber", ###( ## 'status': "Admission Status", ## 'name': "Name", ## 'score': "Score", ## 'sex': "Sex", ## 'faculty': "Faculty", ## 'department': "Dept", ## 'course': "Course", ## 'course_code_org': "Course Code", ## } ###) csv_d = {'jamb_reg_no': "JAMBRegno", 'name': "Name", 'score': "Score", 'sex': "Sex", 'course': "Course", 'faculty': "Faculty", 'department': "Dept", 'course_code_org': "Course Code", 'status': "Admission Status", 'result_type': None, } csv_fields = [f[1] for f in csv_d.items() if f[1]] tr_count = 0 total = 0 #name = 'pup_new' name = 'pup_update' update = name.endswith('update') no_import = [] ok_import = [] ok_import.append('%s' % ','.join(['"%s"' % fn for fn in csv_d.keys()])) no_import.append('%s' % ','.join(['"%s"' % fn for fn in csv_fields])) current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S") ok_import_name = "%s/import/%s_imported_%s.csv" % (i_home,name,current) #open(ok_import_name,"w").write('\n'.join(no_import)) no_import_name = "%s/import/%s_not_imported_%s.csv" % (i_home,name,current) #open(no_import_name,"w").write('\n'.join(no_import)) logger = logging.getLogger('Import.%s' % name) starttime = DateTime.now() logger.info('Start loading from %s.csv' % name) try: result = csv.DictReader(open("%s/import/%s.csv" % (i_home,name),"rb")) except: logger.error('Error reading %s.csv' % name) return pume = self.portal_pumeresults format = ','.join(['"%%(%s)s"' % fn for fn in csv_fields]) import_format = ','.join(['"%%(%s)s"' % fn for fn in csv_d.keys()]) eduplicate = '%s,"duplicate"' % format einvalidjamb = '%s,"invalid JambRegNo"' % format added = 'added ,%s' % format #from pdb import set_trace;set_trace() for jamb in result: dict = {} for f,fn in csv_d.items(): dict[f] = jamb.get(csv_d[f]) dict['result_type'] = 'DE' jnr = jamb.get(csv_d['jamb_reg_no']) if not checkJambNo(jnr): logger.info(einvalidjamb % jamb) dd = {} for f,fn in csv_d.items(): dd[fn] = getattr(data,f) no_import.append(eduplicate % dd) no_import.append(eduplicate % jamb) continue res = 
pume(jamb_reg_no=jnr) if len(res) > 0: if update: try: pume.modifyRecord(**dict) except ValueError: logger.info(eduplicate % jamb) continue except KeyError: pume.addRecord(**dict) logger.info(added % jamb) continue else: data = res[0] if data.name != jamb.get(csv_d['name']): #set_trace() logger.info(eduplicate % jamb) #em = 'Student with REG-NO %(jamb_reg_no)s already exists\n' % dict #logger.info(em) dd = {} for f,fn in csv_d.items(): dd[fn] = getattr(data,f) no_import.append(eduplicate % dd) no_import.append(eduplicate % jamb) continue try: pume.addRecord(**dict) ok_import.append(import_format % dict) except ValueError: logger.info(eduplicate % jamb) #em = 'Student with REG-NO %(jamb_reg_no)s already exists\n' % dict #logger.info(em) no_import.append(eduplicate % jamb) logger.info('End loading from %s.csv' % name) if len(no_import) > 1: open(no_import_name,"w+").write('\n'.join(no_import)) open(ok_import_name,"w+").write('\n'.join(ok_import)) return self.REQUEST.RESPONSE.redirect("%s" % self.REQUEST.get('URL1')) ###) security.declareProtected(ModifyPortalContent,"OLDloadPumeResultsFromCSV")###( def OLDloadPumeResultsFromCSV(self): """load Fulltime Studentdata from CSV values""" import transaction import random wftool = self.portal_workflow students_folder = self.portal_catalog({'meta_type': 'StudentsFolder'})[-1].getObject() csv_d = {'jamb_reg_no': "JAMBRegno", 'jamb_lastname': "Name", 'pume_options': "Options", 'session': "Session", 'days': "Days", 'response': "Responce", 'wrong': "Wrong", 'pume_eng_score': "EngScore", 'pume_gen_score': "GenScore", 'pume_tot_score': "Score", 'batch': "Batch", 'serial': "SerialNo", 'jamb_score': "JambScore", 'omitted':"Omitted", 'search_key': "SearchKey", 'jamb_sex': "Sex", 'fac1': "Fac1", 'fac2': "Fac2", 'jamb_first_cos': "CourseofStudy", 'stud_status':"StudStatus", 'registered': "Registered", 'jamb_state': "State", 'eng_fail': "EngFail", 'gen_fail': "GenFail", 'un_ans_eng': "UnAnsEng", 'un_ans_eng': "UnAnsGen", 'total_ans': "TotalUnAns", 'dept': "Dept", 'jamb_second_cos': "Course2", 'jamb_third_cos': "course3", } csv_fields = [f[1] for f in csv_d.items()] tr_count = 0 name = 'pume_results' no_import = [] s = ','.join(['"%s"' % fn for fn in csv_fields]) no_import.append('%s\n' % s) logger = logging.getLogger('Import.%s' % name) logger.info('Start loading from %s.csv' % name) try: result = csv.DictReader(open("%s/import/%s.csv" % (i_home,name),"rb")) except: logger.error('Error reading %s.csv' % name) return for jamb in result: format = ','.join(['"%%(%s)s"' % fn for fn in csv_fields]) processing = "processing %s" % format logger.info(processing % jamb) jamb_reg_no = jamb.get(csv_d['jamb_reg_no']) #import pdb;pdb.set_trace() res = self.portal_catalog({'portal_type': "StudentApplication", 'jamb_reg_no': jamb_reg_no }) if res: em = 'Student with REG-NO %s already exists\n' % jamb_reg_no logger.info(em) no_import.append(em) no_import.append(format % jamb) continue cert_id = jamb.get(csv_d['jamb_first_cos']).upper() res = self.portal_catalog({'portal_type': "Certificate", 'id': cert_id }) if len(res) < 1: em = 'No Certificate with ID %s \n' % cert_id logger.info(em) no_import.append(em) no_import.append(format % jamb) continue cert = res[0].getObject() cert_path = res[0].getPath() cert_doc = cert.getContent() jamb_name = jamb.get(csv_d['jamb_lastname']) jamb_name.replace('>','') names = jamb_name.split() letter = names[-1][0].upper() sid = self.generateStudentId(letter) not_created = True while not_created: try: students_folder.invokeFactory('Student', sid) 
not_created = False except BadRequest: sid = self.generateStudentId(letter) logger.info('%(tr_count)s: Creating Student with ID %(sid)s REG-NO %(jamb_reg_no)s ' % vars()) student = getattr(self,sid) student.manage_setLocalRoles(sid, ['Owner',]) student.invokeFactory('StudentClearance','clearance') #wftool.doActionFor(student.clearance,'open') dp = {'Title': 'Clearance/Eligibility Record'} student.clearance.manage_setLocalRoles(sid, ['Owner',]) student.invokeFactory('StudentPume','pume') dp = {'Title': 'Pume Data'} student.invokeFactory('StudentApplication','application') da = {'Title': 'Application Data'} da["jamb_lastname"] = jamb_name da_fields = ('jamb_reg_no', 'jamb_sex', 'jamb_state', 'jamb_score', 'jamb_first_cos', 'jamb_sex', 'jamb_state', 'jamb_first_cos', 'jamb_second_cos', ) for f in da_fields: da[f] = jamb.get(csv_d[f]) app = student.application app.getContent().edit(mapping=da) app.manage_setLocalRoles(sid, ['Owner',]) #wftool.doActionFor(app,'close') dp_fields = ( 'pume_eng_score', 'pume_gen_score', 'pume_tot_score', ) for f in dp_fields: dp[f] = float(jamb.get(csv_d[f])) pume = student.pume pume.getContent().edit(mapping=dp) #wftool.doActionFor(pume,'close') pume.manage_setLocalRoles(sid, ['Owner',]) # # Study Course # student.invokeFactory('StudentStudyCourse','study_course') study_course = student.study_course dsc = {} from_certificate = ['title', 'max_elect', 'max_pass', 'n_core', 'nr_years', 'probation_credits', 'promotion_credits', 'start_level', ] for f in from_certificate: dsc[f] = getattr(cert_doc,f) cpl = cert_path.split('/') dsc['faculty'] = cpl[-4] dsc['department'] = cpl[-3] dsc['study_course'] = cert_id dsc['entry_session'] = jamb.get(csv_d['session']) study_course.getContent().edit(mapping=dsc) student.getContent().createSubObjects() if dp['pume_tot_score']>49: wftool.doActionFor(student,'pume_pass') wftool.doActionFor(student,'admit') else: wftool.doActionFor(student,'pume_fail') wftool.doActionFor(student,'reject_admission') if len(no_import) > 1: open("%s/import/%s_not_imported.csv" % (i_home,name),"w").write( '\n'.join(no_import)) return self.REQUEST.RESPONSE.redirect("%s" % self.REQUEST.get('URL1')) ###) security.declareProtected(ModifyPortalContent,"loadFullTimeStudentsResultsFromCSV") ###( def loadFullTimeStudentsResultsFromCSV(self): """load Fulltime Studentdata from CSV values""" #return level_wf_actions = {} level_wf_actions["SUCCESSFUL STUDENT"] = "pass_A" level_wf_actions["STUDENT WITH CARRYOVER COURSES"] = "pass_B" level_wf_actions["STUDENT FOR PROBATION"] = "probate_C" level_wf_actions["STUDENT ON PROBATION/TRANSFER"] = "reject_D" import transaction wftool = self.portal_workflow tr_count = 0 name = 'short_full_time_results_2004_2005' no_import = False if not no_import: no_import = open("%s/import/%s_not_imported.csv" % (i_home,name),"w") no_import.write('"Matnumber","CosCode","Ansbook","CosStuatus","Session","Mat_Cos","Score","CarryLevel","Grade","Weight","Semster","Verdict","Level","id","GPA"\n') logger = logging.getLogger('import.%s' % name) logger.info('Start loading from %s.csv' % name) students_folder = self.portal_catalog({'meta_type': 'StudentsFolder'})[-1].getObject() try: results = csv.DictReader(open("%s/import/%s.csv" % (i_home,name),"rb")) except: logger.error('Error reading %s.csv' % name) return l = self.portal_catalog({'meta_type': "Course"}) courses = {} for c in l: courses[c.id] = c.getObject() level_changed = False student_changed = False sid = '' #import pdb;pdb.set_trace() for result in results: temp_sid = 
result.get('Matnumber') if temp_sid != sid: student_changed = True res = self.portal_catalog({'meta_type': "StudentClearance", 'SearchableText': temp_sid }) if not res: em = 'Student with ID %(Matnumber)s not found\n' % result logger.info(em) no_import.write(em) no_import.write('"%(Matnumber)s","%(CosCode)s","%(Ansbook)s","%(CosStuatus)s","%(Session)s","%(Mat_Cos)s","%(Score)s","%(CarryLevel)s","%(Grade)s","%(Weight)s","%(Semster)s","%(Verdict)s","%(Level)s","%(id)s","%(GPA)s"\n' % result) continue elif len(res) > 1: em = 'More than one Student with ID %(Matnumber)s found\n' % result logger.info(em) no_import.write(em) no_import.write('"%(Matnumber)s","%(CosCode)s","%(Ansbook)s","%(CosStuatus)s","%(Session)s","%(Mat_Cos)s","%(Score)s","%(CarryLevel)s","%(Grade)s","%(Weight)s","%(Semster)s","%(Verdict)s","%(Level)s","%(id)s","%(GPA)s"\n' % result) continue sid = temp_sid sf = res[0].getObject().aq_parent sc = getattr(sf,'study_course') level = '' else: student_changed = False course = result.get('CosCode') if course not in courses.keys(): em = 'Course with ID %(CosCode)s not found\n' % result logger.info(em) no_import.write(em) no_import.write('"%(Matnumber)s","%(CosCode)s","%(Ansbook)s","%(CosStuatus)s","%(Session)s","%(Mat_Cos)s","%(Score)s","%(CarryLevel)s","%(Grade)s","%(Weight)s","%(Semster)s","%(Verdict)s","%(Level)s","%(id)s","%(GPA)s"\n' % result) continue course_doc = courses[course].getContent() temp_level = result.get('Level') student_id = sf.getId() result['StudentId'] = student_id if temp_level != level: try: int(temp_level) except: em = 'Result with ID %(Matnumber)s Course %(CosCode)s Level is empty\n' % result logger.info(em) no_import.write(em) no_import.write('"%(Matnumber)s","%(CosCode)s","%(Ansbook)s","%(CosStuatus)s","%(Session)s","%(Mat_Cos)s","%(Score)s","%(CarryLevel)s","%(Grade)s","%(Weight)s","%(Semster)s","%(Verdict)s","%(Level)s","%(id)s","%(GPA)s"\n' % result) continue level_changed = True if 'dlev' in vars().keys(): wftool.doActionFor(l,level_wf_actions[dlev['verdict']]) level = temp_level l = getattr(sc,level,None) if l is None: logger.info('Creating Level %(Level)s for %(StudentId)s %(Matnumber)s' % result) sc.invokeFactory('StudentStudyLevel', level) l = getattr(sc, level) l.manage_setLocalRoles(student_id, ['Owner',]) else: level_changed = False cr = getattr(l,course,None) if cr is None: logger.info('Creating Course %(CosCode)s for %(StudentId)s %(Matnumber)s in Level %(Level)s' % result) l.invokeFactory('StudentCourseResult',course) cr = getattr(l,course) dcr = {} from_course = ['title', 'credits', 'passmark', ] for f in from_course: dcr[f] = getattr(course_doc,f) dlev = {} dcr['ansbook'] = result.get('Ansbook') dcr['semester'] = getInt(result.get('Semster')) dcr['status'] = result.get('CosStuatus') dcr['score'] = getInt(result.get('Score')) dlev['session'] = result.get('Session') dcr['carry_level'] = result.get('CarryLevel') dcr['grade'] = result.get('Grade') dcr['weight'] = result.get('Weight') dlev['verdict'] = result.get('Verdict') dcr['import_id'] = result.get('id') gpa = result.get('GPA').replace(',','.') dlev['imported_gpa'] = getFloat(gpa) cr.getContent().edit(mapping = dcr) cr.manage_setLocalRoles(student_id, ['Owner',]) l.getContent().edit(mapping = dlev) if tr_count > MAX_TRANS: transaction.commit() tr_count = 0 tr_count += 1 wftool.doActionFor(cr,'close') wftool.doActionFor(l,level_wf_actions[dlev['verdict']]) return self.REQUEST.RESPONSE.redirect("%s" % self.REQUEST.get('URL1')) ###) 
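# The import methods below follow a common pattern: each reads "%s/import/<name>.csv" % i_home
# with csv.DictReader, logs progress to a per-import logger, and appends rows it cannot
# process to a "<name>_not_imported*.csv" file in the same directory so the rejected data
# can be corrected and re-imported. Most of them also commit in batches via
# transaction.commit() once tr_count passes a method-specific threshold, presumably to
# keep individual ZODB transactions small.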
security.declareProtected(ModifyPortalContent,"loadJAMBFromCSV")###( def loadJAMBFromCSV(self): """load JAMB data from CSV values""" #return students_folder = self.portal_catalog({'meta_type': 'StudentsFolder'})[-1].getObject() import transaction tr_count = 0 name = 'SampleJAMBDataII' wftool = self.portal_workflow no_import = False if not no_import: no_import = open("%s/import/%s_not_imported.csv" % (i_home,name),"w") no_import.write('REG-NO,NAME,SEX,STATE,LGA,ENG-SCORE,SUBJ1,SUBJ1-SCORE,SUBJ2,SUBJ2-SCORE,SUBJ3,SUBJ3-SCORE,AGGREGATE,UNIV1,FACULTY1,COURSE1,UNIV2,FACULTY2,COURSE2') logger = logging.getLogger('Import.%s' % name) logger.info('Start loading from %s.csv' % name) try: result = csv.DictReader(open("%s/import/%s.csv" % (i_home,name),"rb")) except: logger.error('Error reading %s.csv' % name) return for jamb in result: logger.info('processing %(REG-NO)s,%(NAME)s,%(SEX)s,%(STATE)s,%(LGA)s,%(ENG-SCORE)s,%(SUBJ1)s,%(SUBJ1-SCORE)s,%(SUBJ2)s,%(SUBJ2-SCORE)s,%(SUBJ3)s,%(SUBJ3-SCORE)s,%(AGGREGATE)s,%(UNIV1)s,%(FACULTY1)s,%(COURSE1)s,%(UNIV2)s,%(FACULTY2)s,%(COURSE2)s\n' % jamb) jamb_reg_no = jamb.get('REG-NO') res = self.portal_catalog({'meta_type': "StudentApplication", 'jamb_reg_no': jamb_reg_no }) if res: em = 'Student with REG-NO %(REG-NO)s already exists\n' % jamb logger.info(em) no_import.write(em) no_import.write('%(REG-NO)s,%(NAME)s,%(SEX)s,%(STATE)s,%(LGA)s,%(ENG-SCORE)s,%(SUBJ1)s,%(SUBJ1-SCORE)s,%(SUBJ2)s,%(SUBJ2-SCORE)s,%(SUBJ3)s,%(SUBJ3-SCORE)s,%(AGGREGATE)s,%(UNIV1)s,%(FACULTY1)s,%(COURSE1)s,%(UNIV2)s,%(FACULTY2)s,%(COURSE2)s\n' % jamb) continue jamb_name = jamb.get("NAME") jamb_name.replace('>','') names = jamb_name.split() letter = names[-1][0].upper() sid = self.generateStudentId(letter) not_created = True while not_created: try: students_folder.invokeFactory('Student', sid) not_created = False except BadRequest: sid = self.generateStudentId(letter) logger.info('%(tr_count)s: Creating Student with ID %(sid)s REG-NO %(jamb_reg_no)s ' % vars()) student = getattr(self,sid) student.manage_setLocalRoles(sid, ['Owner',]) student.invokeFactory('StudentApplication','application') da = {'Title': 'Application Data'} da["jamb_reg_no"] = jamb.get("REG-NO") da["jamb_lastname"] = jamb_name da["jamb_sex"] = jamb.get("SEX") da["jamb_state"] = jamb.get("STATE") da["jamb_lga"] = jamb.get("LGA") da["jamb_score"] = jamb.get("AGGREGATE") da["jamb_first_cos"] = jamb.get("COURSE1") da["jamb_second_cos"] = jamb.get("COURSE2") da["jamb_first_uni"] = jamb.get("UNIV1") da["jamb_second_uni"] = jamb.get("UNIV2") app = student.application app_doc = app.getContent() app_doc.edit(mapping=da) #wftool.doActionFor(app,'open',dest_container=app) app.manage_setLocalRoles(sid, ['Owner',]) student.getContent().createSubObjects() return self.REQUEST.RESPONSE.redirect("%s" % self.REQUEST.get('URL1')) ###) security.declareProtected(ModifyPortalContent,"importPreviousSessionStudents")###( def importPreviousSessionStudents(self): """load and create previous session students from CSV""" import transaction import random wftool = self.portal_workflow current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S") students_folder = self.portal_url.getPortalObject().campus.students tr_count = 1 total = 0 #name = 'pume_results' name = 'Previous' keys = self.portal_schemas.import_student.keys() keys += self.portal_schemas.import_student_level_data.keys() not_imported = [] imported = [] certificates = {} logger = logging.getLogger('Students.StudentsFolder.importPreviousSessionStudents') try: records = 
csv.DictReader(open("%s/import/%s.csv" % (i_home,name),"rb")) except: logger.error('Error reading %s.csv' % name) return start = True for record in records: if start: start = False logger.info('Start loading from %s.csv' % name) false_keys = [k for k in record.keys() if k not in keys] right_keys = [k for k in record.keys() if k in keys] if false_keys: logger.info('Fields %s not in schema' % false_keys) s = ','.join(['"%s"' % k for k in right_keys]) imported.append(s) not_imported.append('%s,"error"' % s) format = ','.join(['"%%(%s)s"' % k for k in right_keys]) format_error = format + ',"%(error)s"' study_course = makeCertificateCode(record.get('study_course')) matric_no = record.get('matric_no') student_res = self.students_catalog(matric_no = matric_no) if student_res: record['error'] = "Student exists" not_imported.append(format_error % record) continue if study_course not in certificates.keys(): cert_res = self.portal_catalog(portal_type='Certificate', id = study_course) if not cert_res: record['error'] = "No such studycourse" not_imported.append(format_error % record) continue certificates[cert_res[0].id] = cert_res[0] sid = self.generateStudentId('x',students_folder) students_folder.invokeFactory('Student', sid) #from pdb import set_trace;set_trace() record['student_id'] = sid student = getattr(self,sid) student.manage_setLocalRoles(sid, ['Owner',]) student.invokeFactory('StudentApplication','application') app = student.application app_doc = app.getContent() dict = {'Title': 'Application Data'} dict["jamb_lastname"] = "%(firstname)s %(lastname)s %(middlename)s" % record r2d = [ ('entry_mode','entry_mode'), ('sex','jamb_sex'), ('jamb_score','jamb_score'), ('jamb_reg_no','jamb_reg_no'), ] for r,d in r2d: dict[d] = record[r] app_doc.edit(mapping=dict) app.manage_setLocalRoles(sid, ['Owner',]) wftool.doActionFor(app,'close') record['sex'] = record['sex'] == 'F' student.invokeFactory('StudentPersonal','personal') dict = {} r2d = [('firstname','firstname'), ('middlename','middlename'), ('lastname','lastname'), ('sex','sex'), ('email','email'), ('phone','phone'), ('address','perm_address'), ] for r,d in r2d: dict[d] = record[r] per = student.personal per_doc = per.getContent() per_doc.edit(mapping = dict) per.manage_setLocalRoles(sid, ['Owner',]) # # Clearance # student.invokeFactory('StudentClearance','clearance') #wftool.doActionFor(student.clearance,'open') clearance = getattr(student,'clearance') dict = {'Title': 'Clearance/Eligibility Record'} clearance.getContent().edit(mapping = dict) student.clearance.manage_setLocalRoles(sid, ['Owner',]) # # Study Course # student.invokeFactory('StudentStudyCourse','study_course') study_course = student.study_course dict = {} r2d = [('session','current_session'), ('level','current_level'), ('verdict','current_verdict'), ('study_course','study_course'), ] for r,d in r2d: dict[d] = record[r] study_course.getContent().edit(mapping=dict) # # Study Level # level = record['level'] study_course.invokeFactory('StudentStudyLevel',level) study_level = getattr(study_course,level) dict = {} r2d = [('session','session'), ('level','code'), ('verdict','verdict'), ] for r,d in r2d: dict[d] = record[r] study_level.getContent().edit(mapping=dict) wftool.doActionFor(student,'return') imported.append(format % record) tr_count += 1 record['tr_count'] = tr_count record['total'] = total logger.info('%(total)s+%(tr_count)s: Creating Student %(student_id)s %(matric_no)s %(jamb_reg_no)s' % record) if tr_count > 1000: if len(not_imported) > 0: 
open("%s/import/%s_not_imported%s.csv" % (i_home,name,current),"a").write( '\n'.join(not_imported)+'\n') not_imported = [] open("%s/import/%simported%s.csv" % (i_home,name,current),"a").write( '\n'.join(imported) + '\n') imported = [] em = '%d transactions commited total %s' % (tr_count,total) transaction.commit() logger.info(em) regs = [] total += tr_count tr_count = 0 open("%s/import/%simported%s.csv" % (i_home,name,current),"a").write( '\n'.join(imported)) open("%s/import/%s_not_imported%s.csv" % (i_home,name,current),"a").write( '\n'.join(not_imported)) em = '%d transactions commited total %d' % (tr_count,total) logger.info(em) return self.REQUEST.RESPONSE.redirect("%s" % self.REQUEST.get('URL1')) ###) security.declareProtected(ModifyPortalContent,"createDEStudents")###( def createDEStudents(self): """load Fulltime Studentdata from CSV values""" import transaction import random #from pdb import set_trace wftool = self.portal_workflow students_folder = self.portal_catalog({'meta_type': 'StudentsFolder'})[-1].getObject() csv_d = {'jamb_reg_no': "RegNumber", 'jamb_lastname': "Name", 'session': "Session", 'pume_tot_score': "PUDE SCORE", ##'jamb_score': "JambScore", 'entry_mode': "EntryMode", 'jamb_sex': "Sex", 'jamb_state': "State", 'jamb_first_cos': "AdminCourse", 'faculty': "AdminFaculty", 'course_code': "AdmitCoscode", 'stud_status':"AdmitStatus", 'department': "AdmitDept", 'jamb_lga': "LGA", 'app_email': "email", 'app_mobile': "PhoneNumbers", } csv_fields = [f[1] for f in csv_d.items()] tr_count = 0 total = 0 #name = 'pume_results' name = 'DE_Admitted' no_import = [] s = ','.join(['"%s"' % fn for fn in csv_fields]) no_import.append('"Error",%s' % s) format = '"%(Error)s",' + ','.join(['"%%(%s)s"' % fn for fn in csv_fields]) no_certificate = "no certificate %s" % format open("%s/import/%s_not_imported.csv" % (i_home,name),"w").write('\n'.join(no_import)) logger = logging.getLogger('Students.StudentsFolder.createDEStudents') logger.info('Start loading from %s.csv' % name) l = self.portal_catalog({'meta_type': "Certificate"}) certs = {} cert_docs = {} for f in l: certs[f.getId] = f.getObject().getContent() try: result = csv.DictReader(open("%s/import/%s.csv" % (i_home,name),"rb")) except: logger.error('Error reading %s.csv' % name) return for jamb in result: jamb['Error'] = "Processing" logger.info(format % jamb) jamb_reg_no = jamb.get(csv_d['jamb_reg_no']) res = self.portal_catalog({'portal_type': "StudentApplication", 'SearchableText': jamb_reg_no }) if res: em = 'Student with RegNo %s already exists\n' % jamb_reg_no logger.info(em) jamb['Error'] = "Student exists" no_import.append(format % jamb) continue cert_id = makeCertificateCode(jamb.get(csv_d['course_code'])) if cert_id not in certs.keys(): em = 'No Certificate with ID %s \n' % cert_id logger.info(em) jamb['Error'] = "No Certificate %s" % cert_id no_import.append( format % jamb) continue jamb_reg_no =jamb.get(csv_d['jamb_reg_no']) cert_doc = certs[cert_id] catalog_entry = {} catalog_entry['jamb_reg_no'] = jamb_reg_no jamb_name = jamb.get(csv_d['jamb_lastname']) jamb_name.replace('>','') jamb_name.replace('<','') names = jamb_name.split() letter = names[-1][0].upper() sid = self.generateStudentId(letter) not_created = True while not_created: try: students_folder.invokeFactory('Student', sid) not_created = False except BadRequest: sid = self.generateStudentId(letter) catalog_entry['id'] = sid tr_count += 1 logger.info('%(total)s+%(tr_count)s: Creating Student with ID %(sid)s REG-NO %(jamb_reg_no)s ' % vars()) student = 
getattr(self,sid) student.manage_setLocalRoles(sid, ['Owner',]) student.invokeFactory('StudentPume','pume') dp = {'Title': 'Pume Data'} student.invokeFactory('StudentApplication','application') da = {'Title': 'Application Data'} da["jamb_lastname"] = jamb_name da_fields = ('jamb_reg_no', 'jamb_sex', 'entry_mode', #'jamb_score', 'jamb_first_cos', 'jamb_sex', 'jamb_state', 'jamb_lga', 'app_email', 'app_mobile', ) for f in da_fields: da[f] = jamb.get(csv_d[f]) catalog_entry['email'] = jamb.get(csv_d['app_email']) app = student.application app_doc = app.getContent() picture ="%s/import/pictures/%s.jpg" % (i_home,jamb_reg_no) #import pdb;pdb.set_trace() if os.path.exists(picture): file = open(picture) if False: img = PIL.Image.open(file) img.thumbnail((150,200), resample=PIL.Image.ANTIALIAS) # We now need a buffer to write to. It can't be the same # as the inbuffer as the PNG writer will write over itself. outfile = StringIO() img.save(outfile, format=img.format) else: outfile = file.read() app_doc.manage_addFile('passport', file=outfile, title="%s.jpg" % jamb_reg_no) app.getContent().edit(mapping=da) app.manage_setLocalRoles(sid, ['Owner',]) #wftool.doActionFor(app,'close') dp_fields = ( #'pume_eng_score', #'pume_gen_score', 'pume_tot_score', ) dp['pume_tot_score'] = jamb.get(csv_d['pume_tot_score']) or "No Option Shaded" pume = student.pume pume.getContent().edit(mapping=dp) #wftool.doActionFor(pume,'close') pume.manage_setLocalRoles(sid, ['Owner',]) #student.getContent().createSubObjects() dp = {} if len(names) == 3: dp['firstname'] = names[0].capitalize() dp['middlename'] = names[1].capitalize() dp['lastname'] = names[2].capitalize() elif len(names) == 2: dp['firstname'] = names[0].capitalize() dp['middlename'] = '' dp['lastname'] = names[1].capitalize() else: dp['firstname'] = '' dp['middlename'] = '' dp['lastname'] = jamb_name dp['sex'] = jamb.get(csv_d['jamb_sex']) == 'F' catalog_entry['sex'] = dp['sex'] catalog_entry['name'] = "%(firstname)s %(middlename)s %(lastname)s" % dp student.invokeFactory('StudentPersonal','personal') per = student.personal per_doc = per.getContent() per_doc.edit(mapping = dp) per.manage_setLocalRoles(sid, ['Owner',]) if jamb.get(csv_d['stud_status']) == "Admitted": wftool.doActionFor(student,'pume_pass') wftool.doActionFor(student,'admit') else: wftool.doActionFor(student,'pume_fail') wftool.doActionFor(student,'reject_admission') continue # # Clearance # student.invokeFactory('StudentClearance','clearance') #wftool.doActionFor(student.clearance,'open') dp = {'Title': 'Clearance/Eligibility Record'} student.clearance.manage_setLocalRoles(sid, ['Owner',]) # # Study Course # student.invokeFactory('StudentStudyCourse','study_course') study_course = student.study_course dsc = {} #from_certificate = ['title', # 'max_elect', # 'max_pass', # 'n_core', # 'nr_years', # 'probation_credits', # 'promotion_credits', # 'start_level', # ] #for f in from_certificate: # dsc[f] = getattr(cert_doc,f) #dsc['faculty'] = jamb.get(csv_d['faculty']) #dsc['department'] = jamb.get(csv_d['department']) catalog_entry['faculty'] = jamb.get(csv_d['faculty']) catalog_entry['department'] = jamb.get(csv_d['department']) catalog_entry['course'] = cert_id #catalog_entry['level'] = getattr(cert_doc,'start_level') catalog_entry['level'] = '200' dsc['study_course'] = cert_id dsc['current_level'] = '200' #dsc['entry_session'] = jamb.get(csv_d['session']) study_course.getContent().edit(mapping=dsc) self.students_catalog.addRecord(**catalog_entry) if tr_count > 10: if len(no_import) > 1: 
open("%s/import/%s_not_imported.csv" % (i_home,name),"a").write( '\n'.join(no_import)+'\n') no_import = [] em = '%d transactions commited\n' % tr_count transaction.commit() logger.info(em) total += tr_count tr_count = 0 open("%s/import/%s_not_imported.csv" % (i_home,name),"a").write( '\n'.join(no_import)) return self.REQUEST.RESPONSE.redirect("%s" % self.REQUEST.get('URL1')) ###) security.declareProtected(ModifyPortalContent,"createNewStudents")###( def createNewStudents(self): """load Fulltime Studentdata from CSV values""" import transaction import random #from pdb import set_trace wftool = self.portal_workflow students_folder = self.portal_catalog({'meta_type': 'StudentsFolder'})[-1].getObject() csv_d = {'jamb_reg_no': "RegNumber", 'jamb_lastname': "Name", 'session': "Session", 'pume_tot_score': "PUME SCORE", 'jamb_score': "JambScore", 'jamb_sex': "Sex", 'jamb_state': "State", ## 'jamb_first_cos': "AdminCourse", 'faculty': "AdminFaculty", 'course_code': "AdmitCoscode", 'stud_status':"AdmitStatus", 'department': "AdmitDept", 'jamb_lga': "LGA", 'app_email': "email", 'app_mobile': "PhoneNumbers", } csv_fields = [f[1] for f in csv_d.items()] tr_count = 0 total = 0 #name = 'pume_results' name = 'Admitted' no_import = [] s = ','.join(['"%s"' % fn for fn in csv_fields]) no_import.append('"Error",%s' % s) format = '"%(Error)s",' + ','.join(['"%%(%s)s"' % fn for fn in csv_fields]) no_certificate = "no certificate %s" % format open("%s/import/%s_not_imported.csv" % (i_home,name),"w").write('\n'.join(no_import)) logger = logging.getLogger('Students.StudentsFolder.createNewStudents') logger.info('Start loading from %s.csv' % name) l = self.portal_catalog({'meta_type': "Certificate"}) certs = {} cert_docs = {} for f in l: certs[f.getId] = f.getObject().getContent() try: result = csv.DictReader(open("%s/import/%s.csv" % (i_home,name),"rb")) except: logger.error('Error reading %s.csv' % name) return for jamb in result: jamb['Error'] = "Processing " logger.info(format % jamb) jamb_reg_no = jamb.get(csv_d['jamb_reg_no']) res = self.portal_catalog({'portal_type': "StudentApplication", 'SearchableText': jamb_reg_no }) if res: em = 'Student with RegNo %s already exists\n' % jamb_reg_no logger.info(em) jamb['Error'] = "Student exists" no_import.append(format % jamb) continue cert_id = makeCertificateCode(jamb.get(csv_d['course_code'])) if cert_id not in certs.keys(): em = 'No Certificate with ID %s \n' % cert_id logger.info(em) jamb['Error'] = "No Certificate %s" % cert_id no_import.append( format % jamb) continue res = self.portal_pumeresults(jamb_reg_no = jamb_reg_no) if len(res) == 1: self.portal_pumeresults.modifyRecord(jamb_reg_no = jamb_reg_no, status = jamb.get(csv_d['stud_status']), ) jamb_reg_no =jamb.get(csv_d['jamb_reg_no']) cert_doc = certs[cert_id] catalog_entry = {} catalog_entry['jamb_reg_no'] = jamb_reg_no jamb_name = jamb.get(csv_d['jamb_lastname']) jamb_name.replace('>','') jamb_name.replace('<','') names = jamb_name.split() letter = names[-1][0].upper() sid = self.generateStudentId(letter) not_created = True while not_created: try: students_folder.invokeFactory('Student', sid) not_created = False except BadRequest: sid = self.generateStudentId(letter) catalog_entry['id'] = sid tr_count += 1 logger.info('%(total)s+%(tr_count)s: Creating Student with ID %(sid)s reg_no %(jamb_reg_no)s ' % vars()) student = getattr(self,sid) student.manage_setLocalRoles(sid, ['Owner',]) student.invokeFactory('StudentPume','pume') dp = {'Title': 'Pume Data'} 
student.invokeFactory('StudentApplication','application') da = {'Title': 'Application Data'} da["jamb_lastname"] = jamb_name da_fields = ('jamb_reg_no', 'jamb_sex', #'jamb_state', 'jamb_score', ## 'jamb_first_cos', 'jamb_sex', 'jamb_state', 'jamb_lga', 'app_email', 'app_mobile', ) for f in da_fields: da[f] = jamb.get(csv_d[f]) catalog_entry['email'] = jamb.get(csv_d['app_email']) app = student.application app_doc = app.getContent() #import pdb;pdb.set_trace() picture ="%s/import/pictures/%s.jpg" % (i_home,jamb_reg_no) if os.path.exists(picture): file = open(picture) if False: img = PIL.Image.open(file) img.thumbnail((150,200), resample=PIL.Image.ANTIALIAS) # We now need a buffer to write to. It can't be the same # as the inbuffer as the PNG writer will write over itself. outfile = StringIO() img.save(outfile, format=img.format) else: outfile = file.read() app_doc.manage_addFile('passport', file=outfile, title="%s.jpg" % jamb_reg_no) app.getContent().edit(mapping=da) app.manage_setLocalRoles(sid, ['Owner',]) #wftool.doActionFor(app,'close') dp_fields = ( #'pume_eng_score', #'pume_gen_score', 'pume_tot_score', ) dp['pume_tot_score'] = jamb.get(csv_d['pume_tot_score']) or "No Option Shaded" pume = student.pume pume.getContent().edit(mapping=dp) #wftool.doActionFor(pume,'close') pume.manage_setLocalRoles(sid, ['Owner',]) #student.getContent().createSubObjects() dp = {} if len(names) == 3: dp['firstname'] = names[0].capitalize() dp['middlename'] = names[1].capitalize() dp['lastname'] = names[2].capitalize() elif len(names) == 2: dp['firstname'] = names[0].capitalize() dp['middlename'] = '' dp['lastname'] = names[1].capitalize() else: dp['firstname'] = '' dp['middlename'] = '' dp['lastname'] = jamb_name dp['sex'] = jamb.get(csv_d['jamb_sex']) == 'F' catalog_entry['sex'] = dp['sex'] catalog_entry['name'] = "%(firstname)s %(middlename)s %(lastname)s" % dp student.invokeFactory('StudentPersonal','personal') per = student.personal per_doc = per.getContent() per_doc.edit(mapping = dp) per.manage_setLocalRoles(sid, ['Owner',]) if jamb.get(csv_d['stud_status']) == "Admitted": wftool.doActionFor(student,'pume_pass') wftool.doActionFor(student,'admit') else: wftool.doActionFor(student,'pume_fail') wftool.doActionFor(student,'reject_admission') continue # # Clearance # student.invokeFactory('StudentClearance','clearance') #wftool.doActionFor(student.clearance,'open') dp = {'Title': 'Clearance/Eligibility Record'} student.clearance.manage_setLocalRoles(sid, ['Owner',]) # # Study Course # student.invokeFactory('StudentStudyCourse','study_course') study_course = student.study_course dsc = {} #from_certificate = ['title', # 'max_elect', # 'max_pass', # 'n_core', # 'nr_years', # 'probation_credits', # 'promotion_credits', # 'start_level', # ] #for f in from_certificate: # dsc[f] = getattr(cert_doc,f) #dsc['faculty'] = jamb.get(csv_d['faculty']) #dsc['department'] = jamb.get(csv_d['department']) catalog_entry['faculty'] = jamb.get(csv_d['faculty']) catalog_entry['department'] = jamb.get(csv_d['department']) catalog_entry['course'] = cert_id #catalog_entry['level'] = getattr(cert_doc,'start_level') catalog_entry['level'] = '100' dsc['study_course'] = cert_id #dsc['entry_level'] = '100' #dsc['entry_session'] = jamb.get(csv_d['session']) study_course.getContent().edit(mapping=dsc) self.students_catalog.addRecord(**catalog_entry) if tr_count > 10: if len(no_import) > 0: open("%s/import/%s_not_imported.csv" % (i_home,name),"a").write( '\n'.join(no_import) + "\n") no_import = [] em = '%d transactions committed\n' % 
tr_count transaction.commit() logger.info(em) total += tr_count tr_count = 0 open("%s/import/%s_not_imported.csv" % (i_home,name),"a").write( '\n'.join(no_import)) return self.REQUEST.RESPONSE.redirect("%s" % self.REQUEST.get('URL1')) ###) security.declareProtected(ModifyPortalContent,"fixVerdicts")###( def fixVerdicts(self,csv_file=None): """fix wrong uploaded verdicts""" import transaction import random wftool = self.portal_workflow current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S") students_folder = self.portal_url.getPortalObject().campus.students tr_count = 1 total = 0 if csv_file is None: name = 'Verdicts' else: name = csv_file st_cat = self.students_catalog no_import = [] verdicts_voc = self.portal_vocabularies.verdicts rverdicts = {} for k,v in verdicts_voc.items(): rverdicts[v.upper()] = k rverdicts['STUDENT ON PROBATION'] = 'C' logger = logging.getLogger('Students.StudentsFolder.fixVerdicts') try: verdicts = csv.DictReader(open("%s/import/%s.csv" % (i_home,name),"rb")) except: logger.error('Error reading %s.csv' % name) return start = True #import pdb;pdb.set_trace() for verdict in verdicts: if start: start = False logger.info('Start loading from %s.csv' % name) s = ','.join(['"%s"' % fn for fn in verdict.keys()]) no_import.append('%s,"Error"' % s) format = ','.join(['"%%(%s)s"' % fn for fn in verdict.keys()]) format_error = format + ',"%(Error)s"' matric_no = verdict.get('MAT NO') if not matric_no: continue matric_no = matric_no.upper() if matric_no == '': continue verdict_code = rverdicts.get(verdict.get('CATEGORY'),None) if verdict_code is None: continue sres = st_cat(matric_no = matric_no) if sres: student_id = sres[0].id student_obj = getattr(students_folder,student_id,None) if student_obj: study_course = getattr(student_obj,'study_course',None) if study_course is None: verdict['Error'] = "Student did not yet log in" no_import.append( format_error % verdict) continue st_cat.modifyRecord(id = student_id, verdict=verdict_code) dsc = {} dsc['current_verdict'] = verdict_code study_course.getContent().edit(mapping=dsc) else: verdict['Error'] = "Not found in students_catalog" no_import.append( format_error % verdict) continue tr_count += 1 if tr_count > 1000: if len(no_import) > 0: open("%s/import/%s_not_imported%s.csv" % (i_home,name,current),"a").write( '\n'.join(no_import) + '\n') no_import = [] em = '%d transactions commited total %s\n' % (tr_count,total) transaction.commit() regs = [] logger.info(em) total += tr_count tr_count = 0 open("%s/import/%s_not_imported%s.csv" % (i_home,name,current),"a").write( '\n'.join(no_import)) total += tr_count em = '%d total transactions commited' % (total) logger.info(em) return self.REQUEST.RESPONSE.redirect("%s" % self.REQUEST.get('URL1')) ###) security.declareProtected(ModifyPortalContent,"fixAllEntryModeForReturning")###( def fixAllEntryModeForReturning(self): "read all Returning*.csv" ipath = "%s/import/" % i_home names = os.listdir(ipath) for name in names: head,tail = os.path.splitext(name) if head.startswith('Returning')\ and tail == '.csv'\ and name.find('imported') < 0: self.fixEntryModeForReturning(csv_file=head) ###) security.declareProtected(ModifyPortalContent,"fixEntryModeForReturning")###( def fixEntryModeForReturning(self,csv_file=None): """load Returning Studentdata from CSV values""" import transaction import random wftool = self.portal_workflow current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S") students_folder = self.portal_url.getPortalObject().campus.students tr_count = 1 total = 0 if csv_file is None: 
name = 'Returning' else: name = csv_file table = self.returning_import st_cat = self.students_catalog no_import = [] logger = logging.getLogger('Students.StudentsFolder.fixEntryModeForReturning') try: returning = csv.DictReader(open("%s/import/%s.csv" % (i_home,name),"rb")) except: logger.error('Error reading %s.csv' % name) return start = True for student in returning: if start: start = False logger.info('Start loading from %s.csv' % name) s = ','.join(['"%s"' % fn for fn in student.keys()]) no_import.append('%s,"Error"' % s) format = ','.join(['"%%(%s)s"' % fn for fn in student.keys()]) format_error = format + ',"%(Error)s"' matric_no = student.get('matric_no') if not matric_no: continue matric_no = matric_no.upper() student['matric_no'] = matric_no if matric_no == '': continue if not table(matric_no = matric_no): student['Error'] = "Not imported yet" no_import.append( format_error % student) continue student_id = None app = None per = None if st_cat(matric_no = matric_no): student_id = st_cat(matric_no = matric_no)[0].id student_obj = getattr(students_folder,student_id,None) if student_obj: app = getattr(student_obj,'application',None) if app is not None: app_doc = app.getContent() per = getattr(student_obj,'personal',None) if per is not None: per_doc = per.getContent() student['Mode_of_Entry'] = entry_mode = student.get('Mode of Entry').upper() student['Permanent_Address'] = perm_address = student.get('Permanent Address') #import pdb;pdb.set_trace() if not entry_mode: student['Error'] = "'Mode of Entry' empty" no_import.append( format_error % student) continue try: table.modifyRecord(matric_no = matric_no, Mode_of_Entry = entry_mode, Permanent_Address = perm_address) except KeyError: student['Error'] = "Not found in returning_import" no_import.append( format_error % student) continue if student_id is not None: try: st_cat.modifyRecord(id = student_id, entry_mode=entry_mode) except KeyError: student['Error'] = "Not found in students_catalog" no_import.append( format_error % student) continue if app is not None: da = {} da['entry_mode'] = entry_mode app_doc.edit(mapping=da) if per is not None: dp = {} dp['perm_address'] = perm_address per_doc.edit(mapping=dp) tr_count += 1 if tr_count > 1000: if len(no_import) > 0: open("%s/import/%s_not_imported%s.csv" % (i_home,name,current),"a").write( '\n'.join(no_import) + '\n') no_import = [] em = '%d transactions commited total %s\n' % (tr_count,total) transaction.commit() regs = [] logger.info(em) total += tr_count tr_count = 0 open("%s/import/%s_not_imported%s.csv" % (i_home,name,current),"a").write( '\n'.join(no_import)) return self.REQUEST.RESPONSE.redirect("%s" % self.REQUEST.get('URL1')) ###) security.declareProtected(ModifyPortalContent,"loadPumeResultsFromCSV")###( def loadPumeResultsFromCSV(self): """load Fulltime Studentdata from CSV values into pumeresults catalog""" import transaction import random ## csv_d = {'jamb_reg_no': "RegNumber", ###( ## 'status': "Admission Status", ## 'name': "Name", ## 'score': "Score", ## 'sex': "Sex", ## 'faculty': "Faculty", ## 'department': "Dept", ## 'course': "Course", ## 'course_code_org': "Course Code", ## } ###) ## csv_d = {'jamb_reg_no': "JAMBRegno", ## 'name': "Name", ## 'score': "Score", ## 'sex': "Sex", ## 'course': "Course", ## 'faculty': "Faculty", ## 'department': "Dept", ## 'course_code_org': "Course Code", ## 'status': "Admission Status", ## 'result_type': None, ## } csv_d = {'jamb_reg_no': "reg_no", 'name': "fullname", 'score': "pume_score", 'sex': "sex", 'course': "study_course", 
'course_code_org': "study_course", 'status': "admission_status", 'result_type': "entry_mode", } csv_fields = [f[1] for f in csv_d.items() if f[1]] tr_count = 0 total = 0 #name = 'pup_new' #name = 'pup_update' name = 'Admitted_update' update = name.endswith('update') no_import = [] ok_import = [] ok_import.append('%s' % ','.join(['"%s"' % fn for fn in csv_d.keys()])) no_import.append('%s' % ','.join(['"%s"' % fn for fn in csv_fields])) current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S") ok_import_name = "%s/import/%s_imported_%s.csv" % (i_home,name,current) #open(ok_import_name,"w").write('\n'.join(no_import)) no_import_name = "%s/import/%s_not_imported_%s.csv" % (i_home,name,current) #open(no_import_name,"w").write('\n'.join(no_import)) logger = logging.getLogger('Students.loadPumeResultsFromCSV') starttime = DateTime.now() logger.info('Start loading from %s.csv' % name) try: result = csv.DictReader(open("%s/import/%s.csv" % (i_home,name),"rb")) except: logger.error('Error reading %s.csv' % name) return pume = self.portal_pumeresults format = ','.join(['"%%(%s)s"' % fn for fn in csv_fields]) import_format = ','.join(['"%%(%s)s"' % fn for fn in csv_d.keys()]) eduplicate = '%s,"duplicate"' % format eoriginal = '%s,"original"' % format einvalidjamb = '%s,"invalid JambRegNo"' % format added = 'added ,%s' % format #from pdb import set_trace;set_trace() for jamb in result: if not jamb.get(csv_d['score']): logger.info('Student %s has no pume_score' % jamb.get(csv_d['jamb_reg_no'])) continue dict = {} for f,fn in csv_d.items(): dict[f] = jamb.get(csv_d[f]) dict['result_type'] = 'CEST' jnr = jamb.get(csv_d['jamb_reg_no']) #if not checkJambNo(jnr): # logger.info(einvalidjamb % jamb) # dd = {} # for f,fn in csv_d.items(): # dd[fn] = getattr(data,f) # no_import.append(eduplicate % dd) # no_import.append(eduplicate % jamb) # continue res = pume(jamb_reg_no=jnr) if len(res) > 0: if update: try: pume.modifyRecord(**dict) # Can not happen, but anyway... except ValueError: logger.info(eduplicate % jamb) continue # Can not happen, but anyway... except KeyError: pume.addRecord(**dict) logger.info(added % jamb) continue else: data = res[0] if data.name != jamb.get(csv_d['name']): #set_trace() logger.info(eduplicate % jamb) #em = 'Student with REG-NO %(jamb_reg_no)s already exists\n' % dict #logger.info(em) dd = {} for f,fn in csv_d.items(): dd[fn] = getattr(data,f) no_import.append(eoriginal % dd) no_import.append(eduplicate % jamb) continue else: try: pume.addRecord(**dict) ok_import.append(import_format % dict) except ValueError: logger.info(eduplicate % jamb) #em = 'Student with REG-NO %(jamb_reg_no)s already exists\n' % dict #logger.info(em) no_import.append(eduplicate % jamb) logger.info('End loading from %s.csv' % name) if len(no_import) > 1: open(no_import_name,"w+").write('\n'.join(no_import)) open(ok_import_name,"w+").write('\n'.join(ok_import)) return self.REQUEST.RESPONSE.redirect("%s" % self.REQUEST.get('URL1')) ###) security.declareProtected(ModifyPortalContent,"createStudents")###( def createStudents(self): """ load addmitted Studentdata from CSV values and create Studentobjects. This is the current method to create new addmitted Students. Before running the eventservice for the students_catalog must be disabled. 
""" import transaction import random #from pdb import set_trace wftool = self.portal_workflow #students_folder = self.portal_catalog({'meta_type': 'StudentsFolder'})[-1].getObject() students_folder = self.portal_url.getPortalObject().campus.students levels = {'ume_ft':'100', 'de_ft': '200', 'ug_pt': '100', 'de_pt': '200', 'pg_ft': '700', 'pg_pt': '700', 'dp_pt': '100', 'dp_ft': '100', } csv_d = {'jamb_reg_no': "reg_no", 'entry_mode': 'entry_mode', 'jamb_firstname': "firstname", 'jamb_middlename': "middlename", 'jamb_lastname': "lastname", 'jamb_sex': "sex", 'jamb_state': "state", 'birthday': "date_of_birth", 'app_email': "email", 'study_course': "study_course", 'perm_address': "address", 'admission_status': "admission_status", } csv_fields = [f[1] for f in csv_d.items()] tr_count = 0 total = 0 #name = 'pume_results' name = 'Admitted_update' no_import = [] s = ','.join(['"%s"' % fn for fn in csv_fields]) no_import.append('"Error",%s' % s) format = '"%(Error)s",' + ','.join(['"%%(%s)s"' % fn for fn in csv_fields]) no_certificate = "no certificate %s" % format open("%s/import/%s_not_imported.csv" % (i_home,name),"w").write('\n'.join(no_import)) logger = logging.getLogger('Students.StudentsFolder.createStudents') logger.info('Start loading from %s.csv' % name) certs = {} try: results = csv.DictReader(open("%s/import/%s.csv" % (i_home,name),"rb")) except: logger.error('Error reading %s.csv' % name) return for result in results: if not result.get(csv_d['admission_status']).startswith('Admitted'): continue #result['Error'] = "Processing " #logger.info(format % result) jamb_reg_no = result.get(csv_d['jamb_reg_no']) res = self.students_catalog(jamb_reg_no = jamb_reg_no) if res: em = 'Student with RegNo %s already exists\n' % jamb_reg_no logger.info(em) result['Error'] = "Student exists" no_import.append(format % result) continue cert_id = makeCertificateCode(result.get(csv_d['study_course'])) if cert_id not in certs.keys(): res = self.portal_catalog(meta_type = "Certificate",id = cert_id) if not res: em = 'No Certificate with ID %s \n' % cert_id logger.info(em) result['Error'] = "No Certificate %s" % cert_id no_import.append( format % result) continue cert = res[0] cert_path = cert.getPath().split('/') certificate = certs[cert_id] = {'faculty': cert_path[-4], 'department': cert_path[-3]} cert_doc = certs[cert_id] catalog_entry = {} catalog_entry['jamb_reg_no'] = jamb_reg_no firstname = result.get(csv_d['jamb_firstname']) middlename = result.get(csv_d['jamb_middlename']) lastname = result.get(csv_d['jamb_lastname']) if len(firstname) < 3\ and len(middlename) < 3\ and len(lastname) < 3: em = 'Student Names to short \n' logger.info(em) result['Error'] = "Names to short" no_import.append( format % result) continue perm_address = result.get(csv_d['perm_address']) sid = self.generateStudentId('x') students_folder.invokeFactory('Student', sid) catalog_entry['id'] = sid tr_count += 1 logger.info('%(total)s+%(tr_count)s: Creating Student with ID %(sid)s reg_no %(jamb_reg_no)s ' % vars()) student = getattr(self,sid) student.manage_setLocalRoles(sid, ['Owner',]) student.invokeFactory('StudentApplication','application') da = {'Title': 'Application Data'} da["jamb_firstname"] = firstname da["jamb_middlename"] = middlename da["jamb_lastname"] = lastname catalog_entry['entry_session'] = da["entry_session"] = self.getSessionId()[0] catalog_entry['sex'] = sex = result.get(csv_d['jamb_sex']).startswith('F') da_fields = ('jamb_reg_no', 'jamb_sex', 'jamb_state', 'entry_mode', 'app_email', ) for f in da_fields: da[f] = 
result.get(csv_d[f]) catalog_entry['email'] = da['app_email'] catalog_entry['entry_mode'] = da['entry_mode'] app = student.application app_doc = app.getContent() app.getContent().edit(mapping=da) picture ="%s/import/pictures/%s.jpg" % (i_home,jamb_reg_no) app.manage_setLocalRoles(sid, ['Owner',]) picture_id = da['jamb_reg_no'].replace('/','_') file = None for ext in ('jpg','JPG'): picture ="%s/import/pictures_admitted_latest/%s.%s" % (i_home,picture_id,ext) if os.path.exists(picture): file = open(picture) break if file is not None: ## file conversion does not work #img = PIL.Image.open(file) #img.thumbnail((150,200), # resample=PIL.Image.ANTIALIAS) #outfile = StringIO() #img.save(outfile, format=img.format) outfile = file.read() app_doc.manage_addFile('passport', file=outfile, title="%s.jpg" % jamb_reg_no) #wftool.doActionFor(app,'close') dp = {} dp['firstname'] = firstname dp['middlename'] = middlename dp['lastname'] = lastname dp['email'] = da['app_email'] dp['sex'] = sex dp['perm_address'] = perm_address catalog_entry['name'] = "%(firstname)s %(middlename)s %(lastname)s" % dp student.invokeFactory('StudentPersonal','personal') per = student.personal per_doc = per.getContent() per_doc.edit(mapping = dp) per.manage_setLocalRoles(sid, ['Owner',]) wftool.doActionFor(student,'pume_pass') wftool.doActionFor(student,'admit') # # Clearance # student.invokeFactory('StudentClearance','clearance') #wftool.doActionFor(student.clearance,'open') clearance = student.clearance dc = {'Title': 'Clearance/Eligibility Record'} clearance = student.clearance date_str = result.get(csv_d['birthday']) try: date = DateTime.DateTime(date_str) except: #import pdb;pdb.set_trace() date = None dc['birthday'] = date clearance.getContent().edit(mapping=dc) clearance.manage_setLocalRoles(sid, ['Owner',]) # # Study Course # student.invokeFactory('StudentStudyCourse','study_course') study_course = student.study_course dsc = {} catalog_entry['level'] = dsc['current_level'] = levels.get(da['entry_mode'],'100') #catalog_entry['level'] = dsc['current_level'] = '100' # Attention: not for DE students catalog_entry['session'] = dsc['current_session'] = da['entry_session'] catalog_entry['mode'] = dsc['current_mode'] = da['entry_mode'] catalog_entry['course'] = dsc['study_course'] = cert_id catalog_entry['faculty'] = certificate['faculty'] catalog_entry['department'] = certificate['department'] catalog_entry['verdict'] = dsc['current_verdict'] = 'N/A' catalog_entry['review_state'] = self.portal_workflow.getInfoFor(student,'review_state',None) study_course.getContent().edit(mapping=dsc) #import pdb;pdb.set_trace() self.students_catalog.addRecord(**catalog_entry) if tr_count > 100: if len(no_import) > 0: open("%s/import/%s_not_imported.csv" % (i_home,name),"a").write( '\n'.join(no_import) + "\n") no_import = [] em = '%d transactions commited\n' % tr_count transaction.commit() logger.info(em) total += tr_count tr_count = 0 open("%s/import/%s_not_imported.csv" % (i_home,name),"a").write( '\n'.join(no_import)) return self.REQUEST.RESPONSE.redirect("%s" % self.REQUEST.get('URL1')) ###) security.declareProtected(ModifyPortalContent,"importReturningStudents")###( def importReturningStudents(self): """load Returning Studentdata from CSV values""" import transaction import random #from pdb import set_trace wftool = self.portal_workflow current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S") #students_folder = self.portal_catalog({'meta_type': 'StudentsFolder'})[-1].getObject() students_folder = 
self.portal_url.getPortalObject().campus.students tr_count = 1 total = 0 #name = 'pume_results' name = 'Returning' table = self.returning_import no_import = [] imported = [] logger = logging.getLogger('Students.StudentsFolder.importReturningStudents') try: returning = csv.DictReader(open("%s/import/%s.csv" % (i_home,name),"rb")) except: logger.error('Error reading %s.csv' % name) return l = self.portal_catalog({'meta_type': "Certificate"}) certs = {} cert_docs = {} for f in l: certs[f.getId] = f.getObject().getContent() start = True res = table() regs = [] if len(res) > 0: regs = [s.matric_no for s in res] #import pdb;pdb.set_trace() for student in returning: if start: start = False logger.info('Start loading from %s.csv' % name) s = ','.join(['"%s"' % fn for fn in student.keys()]) imported.append(s) no_import.append('%s,"Error"' % s) format = ','.join(['"%%(%s)s"' % fn for fn in student.keys()]) format_error = format + ',"%(Error)s"' no_certificate = "no certificate %s" % format student['matric_no'] = matric_no = student.get('matric_no').upper() student['Mode_of_Entry'] = entry_mode = student.get('Mode of Entry').upper() student['Permanent_Address'] = perm_address = student.get('Permanent Address') if matric_no == '': student['Error'] = "Empty matric_no" no_import.append( format_error % student) continue if matric_no in regs or self.returning_import(matric_no = matric_no): student['Error'] = "Duplicate" no_import.append( format_error % student) continue cert_id = makeCertificateCode(student.get('Coursemajorcode')) if cert_id not in certs.keys(): student['Error'] = "No Certificate %s" % cert_id no_import.append( format_error % student) continue try: table.addRecord(**student) except ValueError: student['Error'] = "Duplicate" no_import.append( format_error % student) continue regs.append(student.get('matric_no')) imported.append(format % student) tr_count += 1 if tr_count > 1000: if len(no_import) > 0: open("%s/import/%s_not_imported%s.csv" % (i_home,name,current),"a").write( '\n'.join(no_import) + '\n') no_import = [] open("%s/import/%simported%s.csv" % (i_home,name,current),"a").write( '\n'.join(no_import) + "\n") imported = [] em = '%d transactions commited total %s\n' % (tr_count,total) transaction.commit() regs = [] logger.info(em) total += tr_count tr_count = 0 open("%s/import/%simported%s.csv" % (i_home,name,current),"a").write( '\n'.join(imported)) open("%s/import/%s_not_imported%s.csv" % (i_home,name,current),"a").write( '\n'.join(no_import)) return self.REQUEST.RESPONSE.redirect("%s" % self.REQUEST.get('URL1')) ###) security.declareProtected(ModifyPortalContent,"fixAllNames")###( def fixAllNames(self): "fix all students names" import transaction response = self.REQUEST.RESPONSE logger = logging.getLogger('fixAllNames') logger.info('Start') students = self.portal_catalog(portal_type='Student') count = 0 total = 0 for student in students: scat_res = self.students_catalog(id = student.getId) if not scat_res: self.students_catalog.addRecord(id = student.getId) scat_res = self.students_catalog(id = student.getId) student_entry = scat_res[0] old_new = self.fixName(student,student_entry) count += 1 response_write(response,'"%d","%s",%s' % (count + total,student_entry.id,old_new)) if count > 2000: transaction.commit() logger.info("%d transactions commited" % count) total += count count = 0 ###) security.declareProtected(ModifyPortalContent,"fixName")###( def fixName(self,student_brain, student_entry): "fix the name of a student" fix = "first" if student_entry.get('name_fixed',None) == 
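# --- Standalone sketch (not part of fixName): a simplified restatement of the
# --- name-repair rules applied in fixName -- for newly imported students whose
# --- full name landed in the lastname column, split and capitalise it; reject
# --- values containing angle brackets; treat a bare '-' as an empty value.
def normalise_names(firstname, middlename, lastname, new_student):
    """Return (firstname, middlename, lastname, error) after clean-up."""
    if new_student and not firstname and not middlename and lastname:
        parts = [p.capitalize() for p in lastname.split()]
        if len(parts) > 1:
            firstname, middlename, lastname = parts[0], ' '.join(parts[1:-1]), parts[-1]
        elif parts:
            firstname, middlename, lastname = '', '', parts[0]
    error = ''
    for value in (firstname, middlename, lastname):
        if '<' in value or '>' in value:
            error = 'invalid characters'
    cleaned = [('' if v.strip() == '-' else v) for v in (firstname, middlename, lastname)]
    return cleaned[0], cleaned[1], cleaned[2], error

# normalise_names('', '', 'ADAMU musa bello', True) -> ('Adamu', 'Musa', 'Bello', '')
# --- end of sketch ---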
fix: return "Name already fixed" student_id = student_entry.id new_student = student_entry.jamb_reg_no.startswith('6') student_obj = student_brain.getObject() personal = getattr(student_obj,'personal',None) invalid = '' if personal is None: return '"%s","Returning","%s","%s"' % (invalid,student_entry.name,"not logged in") per_doc = personal.getContent() old_first = per_doc.firstname old_middle = per_doc.middlename old_last = per_doc.lastname new_first = '' new_middle = '' new_last = '' if new_student: if not old_first and not old_middle and old_last: new_names = [n.capitalize() for n in old_last.split()] if len(new_names) > 1: old_first = new_names[0] old_last = new_names[-1] old_middle = ' '.join(new_names[1:-1]) else: old_last = new_names[0] old_first = '' old_middle = '' if old_first: new_first = old_first if old_middle: new_middle = old_middle if old_last: new_last = old_last if old_first.find('<') != -1 or\ old_first.find('>') != -1 or\ old_middle.find('<') != -1 or\ old_middle.find('>') != -1 or\ old_last.find('<') != -1 or\ old_last.find('>') != -1: invalid = "invalid characters" else: new_first = old_first if new_first.strip() == '-': new_first = '' new_middle = old_middle if new_middle.strip() == '-': new_middle = '' new_last = old_last if new_last.strip() == '-': new_last = '' name = "%(new_first)s %(new_middle)s %(new_last)s" % vars() if new_student: text = "New" else: text = "Returning" old_new = '"%s","%s","%s","%s"' % (invalid,text, student_entry.name, name) if not invalid: self.students_catalog.modifyRecord(id = student_id, name_fixed = fix, name = name) per_doc.edit(mapping = {'firstname' : new_first, 'middlename' : new_middle, 'lastname' : new_last, }) return old_new ###) security.declareProtected(ModifyPortalContent,"updateReturningStudents")###( def updateReturningStudents(self): """load and overwrite Returning Student Data from CSV values""" import transaction import random #from pdb import set_trace wftool = self.portal_workflow current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S") #students_folder = self.portal_catalog({'meta_type': 'StudentsFolder'})[-1].getObject() students_folder = self.portal_url.getPortalObject().campus.students tr_count = 1 total = 0 #name = 'pume_results' name = 'Returning_update' table = self.returning_import no_import = [] imported = [] logger = logging.getLogger('Students.StudentsFolder.updateReturningStudents') try: returning = csv.DictReader(open("%s/import/%s.csv" % (i_home,name),"rb")) except: logger.error('Error reading %s.csv' % name) return l = self.portal_catalog({'meta_type': "Certificate"}) certs = {} cert_docs = {} for f in l: certs[f.getId] = f.getObject().getContent() start = True res = table() regs = [] if len(res) > 0: regs = [s.matric_no for s in res] for student in returning: if start: start = False logger.info('Start loading from %s.csv' % name) s = ','.join(['"%s"' % fn for fn in student.keys()]) imported.append(s) no_import.append('%s,"Error"' % s) format = ','.join(['"%%(%s)s"' % fn for fn in student.keys()]) format_error = format + ',"%(Error)s"' no_certificate = "no certificate %s" % format matric_no = student.get('matric_no').upper() student['matric_no'] = matric_no if matric_no == '': student['Error'] = "Empty matric_no" no_import.append( format_error % student) continue # if matric_no in regs or self.returning_import(matric_no = matric_no): # student['Error'] = "Duplicate" # no_import.append( format_error % student) # continue # cert_id = makeCertificateCode(student.get('Coursemajorcode')) # if cert_id not in 
certs.keys(): # student['Error'] = "No Certificate %s" % cert_id # no_import.append( format_error % student) # continue try: table.modifyRecord(**student) except KeyError: #import pdb;pdb.set_trace() student['Error'] = "no Student found to update" no_import.append( format_error % student) continue #s = self.students_catalog(matric_no=matric_no) #if s: # level = "%s" % (int(student.get('Level')) + 100) # self.students_catalog.modifyRecord(id = s[0].id, # level=level) regs.append(student.get('matric_no')) imported.append(format % student) tr_count += 1 if tr_count > 1000: if len(no_import) > 0: open("%s/import/%s_not_imported%s.csv" % (i_home,name,current),"a").write( '\n'.join(no_import) + '\n') no_import = [] open("%s/import/%simported%s.csv" % (i_home,name,current),"a").write( '\n'.join(no_import) + "\n") imported = [] em = '%d transactions commited total %s\n' % (tr_count,total) transaction.commit() regs = [] logger.info(em) total += tr_count tr_count = 0 open("%s/import/%simported%s.csv" % (i_home,name,current),"a").write( '\n'.join(imported)) open("%s/import/%s_not_imported%s.csv" % (i_home,name,current),"a").write( '\n'.join(no_import)) return self.REQUEST.RESPONSE.redirect("%s" % self.REQUEST.get('URL1')) ###) security.declareProtected(ModifyPortalContent,"exportStudents")###( def exportStudents(self): """export Studentsdata to a file""" member = self.portal_membership.getAuthenticatedMember() logger = logging.getLogger('Students.exportStudents') current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S") students_folder = self.portal_url.getPortalObject().campus.students export_file = "%s/export/students_%s.csv" % (i_home,current) from Products.AdvancedQuery import Eq, Between, Le,In,MatchRegexp aq_students = self.students_catalog.evalAdvancedQuery toexport = {'students_catalog': ("id", "matric_no", "jamb_reg_no", "name", "review_state", "entry_session", "session", "entry_mode", "mode", "verdict", "sex", "email", "phone", "faculty", "department", "course", "level", ), 'personal': ('firstname', 'middlename', 'lastname', 'perm_address', ), 'clearance': ('state', # is not an attribute of clearance but is needed for splitting lga 'lga', ) } res_list = [] lines = [] fields = [] with_lga = False for k in toexport.keys(): for f in toexport[k]: if f == 'lga' : with_lga = True fields.append(f) headline = ','.join(fields).replace('level','current_level') open(export_file,"a").write(headline +'\n') format = '"%(' + ')s","%('.join(fields) + ')s"' query = In('review_state',('cleared_and_validated', 'clearance_requested', 'school_fee_paid', 'courses_registered', 'courses_validated')) #query = In('review_state',('clearance_requested')) students = aq_students(query) nr2export = len(students) logger.info('%s starts exportStudents, %s student records to export' % (member,nr2export)) chunk = 1000 total = 0 start = DateTime.DateTime().timeTime() start_chunk = DateTime.DateTime().timeTime() ## alternative method slightly slower # student_recs = {} # for s in students: # student_recs[s.id] = s # catalog_recs = {} # brains = self.portal_catalog(portal_type = 'Student') # for cat_rec in brains: # sid = cat_rec.getId # catalog_recs[sid] = cat_rec # #import pdb;pdb.set_trace() # start = DateTime.DateTime().timeTime() # start_chunk = DateTime.DateTime().timeTime() # for student in students: # if student.id not in student_recs.keys(): # continue # not_all = False # d = self.getFormattedStudentEntry(student_recs[student.id]) # student_obj = catalog_recs[student.id].getObject() for student in students: not_all = 
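# --- Standalone sketch (not part of exportStudents): the export loop below
# --- writes the collected lines in chunks and logs an estimated finishing time
# --- derived from the average time per record so far.  The record count, chunk
# --- size and the do-nothing "export" step are illustrative.
import time

def export_with_eta(records, chunk=1000):
    """Yield (records_written, eta_seconds) after each flushed chunk."""
    start = time.time()
    lines, total = [], 0
    for record in records:
        lines.append('"%s"' % record)          # stand-in for the real row format
        total += 1
        if total % chunk == 0 or total == len(records):
            # in the real code the buffered lines are appended to the export file here
            elapsed = time.time() - start
            average_per_record = elapsed / total
            eta = average_per_record * (len(records) - total)
            lines = []
            yield total, eta

for written, eta in export_with_eta(range(2500), chunk=1000):
    pass   # the portal code logs these figures instead of discarding them
# --- end of sketch ---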
False d = self.getFormattedStudentEntry(student) student_obj = getattr(students_folder,student.id) for k in toexport.keys()[1:]: try: object = getattr(student_obj,k) object_doc = object.getContent() except: logger.info('%s %s record not found' % (student.id,k)) not_all = True continue for f in toexport[k]: d[f] = getattr(object_doc,f,'') if not_all: continue if with_lga: d['state'],d['lga'] = formatLGA(d['lga'],voc = self.portal_vocabularies.local_gov_areas) lines.append(format % d) total += 1 if total and not total % chunk or total == len(students): open(export_file,"a").write('\n'.join(lines) +'\n') anz = len(lines) logger.info("wrote %(anz)d total written %(total)d" % vars()) end_chunk = DateTime.DateTime().timeTime() duration = end_chunk-start_chunk per_record = duration/anz till_now = end_chunk - start avarage_per_record = till_now/total estimated_end = DateTime.DateTime(start + avarage_per_record * nr2export) estimated_end = estimated_end.strftime("%H:%M:%S") logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars()) start_chunk = DateTime.DateTime().timeTime() lines = [] end = DateTime.DateTime().timeTime() logger.info('total time %6.2f m' % ((end-start)/60)) filename, extension = os.path.splitext(export_file) from subprocess import call msg = "wrote %(total)d records to %(export_file)s" % vars() try: retcode = call('gzip %s' % (export_file),shell=True) if retcode == 0: msg = "wrote %(total)d records to %(export_file)s.gz" % vars() except OSError, e: retcode = -99 logger.info("zip failed with %s" % e) logger.info(msg) args = {'portal_status_message': msg} #url = self.REQUEST.get('URL1') + '?' + urlencode(args) url = self.REQUEST.get('URL2') return self.REQUEST.RESPONSE.redirect(url) ###) security.declareProtected(ModifyPortalContent,"updateStudyCourse")###( def updateStudyCourse(self): """update StudyCourse from CSV values""" import transaction import random from pdb import set_trace wftool = self.portal_workflow #students_folder = self.portal_catalog({'meta_type': 'StudentsFolder'})[-1].getObject() students_folder = self.portal_url.getPortalObject().campus.students csv_d = {'jamb_reg_no': "RegNumber", 'jamb_lastname': "Name", 'session': "Session", 'pume_tot_score': "PUME SCORE", 'jamb_score': "JambScore", 'jamb_sex': "Sex", 'jamb_state': "State", ## 'jamb_first_cos': "AdminCourse", 'faculty': "AdminFaculty", 'course_code': "AdmitCoscode", 'stud_status':"AdmitStatus", 'department': "AdmitDept", 'jamb_lga': "LGA", 'app_email': "email", 'app_mobile': "PhoneNumbers", } csv_fields = [f[1] for f in csv_d.items()] tr_count = 0 total = 0 #name = 'pume_results' name = 'StudyCourseChange' no_import = [] s = ','.join(['"%s"' % fn for fn in csv_fields]) no_import.append('"Error",%s' % s) format = '"%(Error)s",' + ','.join(['"%%(%s)s"' % fn for fn in csv_fields]) no_certificate = "no certificate %s" % format open("%s/import/%s_not_imported.csv" % (i_home,name),"w").write( '\n'.join(no_import)) logger = logging.getLogger('Students.StudentsFolder.updateStudyCourse') logger.info('Start loading from %s.csv' % name) l = self.portal_catalog({'meta_type': "Certificate"}) try: result = csv.DictReader(open("%s/import/%s.csv" % (i_home,name),"rb")) except: logger.error('Error reading %s.csv' % name) return for jamb in result: jamb['Error'] = "Processing " logger.info(format % jamb) jamb_reg_no = jamb.get(csv_d['jamb_reg_no']) res = self.portal_catalog({'portal_type': "StudentApplication", 'SearchableText': jamb_reg_no }) if not res: em = 'Student with jamb_reg_no %s does not 
exist\n' % jamb_reg_no
                logger.info(em)
                jamb['Error'] = "Student does not exist"
                no_import.append(format % jamb)
                continue
            sid = res[0].getPath().split('/')[-2]
            cert_id = makeCertificateCode(jamb.get(csv_d['course_code']))
            res = self.portal_catalog(portal_type = "Certificate", id = cert_id)
            if not res:
                em = 'No Certificate with ID %s \n' % cert_id
                logger.info(em)
                jamb['Error'] = "No Certificate %s" % cert_id
                no_import.append( format % jamb)
                continue
            cert_brain = res[0]
            catalog_entry = {}
            student = getattr(self,sid)
            #
            # Study Course
            #
            study_course = student.study_course
            dsc = {}
            cert_pl = cert_brain.getPath().split('/')
            catalog_entry['id'] = sid
            catalog_entry['faculty'] = cert_pl[-4]
            catalog_entry['department'] = cert_pl[-3]
            catalog_entry['course'] = cert_id
            dsc['study_course'] = cert_id
            study_course.getContent().edit(mapping=dsc)
            self.students_catalog.modifyRecord(**catalog_entry)
            if tr_count > 10:
                if len(no_import) > 1:
                    open("%s/import/%s_not_imported.csv" % (i_home,name),"w+").write(
                              '\n'.join(no_import))
                    no_import = []
                em = '%d transactions committed\n' % tr_count
                transaction.commit()
                logger.info(em)
                total += tr_count
                tr_count = 0
            tr_count += 1
        return self.REQUEST.RESPONSE.redirect("%s" % self.REQUEST.get('URL1'))
    ###)

From WAeUP_Tool.py:

    def d1402_importData(self,filename,name,edit=False,bypass_queue_catalog=False): ###(
        """load data from CSV values"""
        import transaction
        import random
        member = self.portal_membership.getAuthenticatedMember()
        logger = logging.getLogger('WAeUPTool.importData')
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        import_date = DateTime.DateTime().strftime("%d/%m/%y %H:%M:%S")
        students_folder = self.portal_url.getPortalObject().campus.students
        start = True
        tr_count = 0
        total_imported = 0
        total_not_imported = 0
        total = 0
        pending_only = False
        pend_str = '_pending'
        if name.endswith(pend_str):
            pending_only = True
            name = name[:-len(pend_str)]
        iname = "import_%s" % name
        if name in ('application','course_result',):
            commit_after = 2000
        else:
            commit_after = 100
        stool = getToolByName(self, 'portal_schemas')
        ltool = getToolByName(self, 'portal_layouts')
        schema = stool._getOb(iname)
        if schema is None:
            em = 'No such schema %s' % iname
            logger.error('No such schema %s' % iname)
            return em
        layout = ltool._getOb(iname)
        if layout is None:
            em = 'No such layout %s' % iname
            logger.error(em)
            return em
        validators = {}
        for widget in layout.keys():
            validators[widget] = layout[widget].validate
        mode = "create"
        if edit:
            if filename.endswith('_toDelete'):
                mode = "delete"
            else:
                mode = "edit"
        importer_name = "mass_%(mode)s_%(name)s" % vars()
        importer = getattr(self, '%s' % importer_name,None)
        if importer is None:
            em = 'No importer function %s' % importer_name
            logger.error(em)
            return em
        pending_fn = "%s/import/%ss_pending.csv" % (i_home,name)
        pending_imported_fn = "%s/import/%ss_pending_imported%s.csv" % (i_home,name,current)
        if pending_only:
            import_source_fn = pending_fn
            imported_fn = "%s/import/%ss_pending_imported%s.csv" % (i_home,name,current)
            not_imported_fn = "%s/import/%ss_pending_not_imported%s.csv" % (i_home,name,current)
            if not os.path.exists(pending_fn):
                em = 'No %s' % os.path.split(pending_fn)
                return em
        else:
            import_source_fn = "%s/import/%s.csv" % (i_home,filename)
            imported_fn = "%s/import/%s_imported%s.csv" % (i_home,filename,current)
            not_imported_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
            if not os.path.exists(import_source_fn):
                em = 'No %s' % os.path.split(import_source_fn)
                return em
        attrs = csv.reader(open(import_source_fn,"rb")).next()
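# --- Standalone sketch (not part of importData): the next step checks the CSV
# --- heading against the import schema -- columns that are neither in the
# --- schema nor ignorable abort the run, and in create/edit mode every required
# --- key must be present.  The schema and heading below are illustrative.
def check_heading(heading, schema_keys, required_keys):
    """Return an error message, or None if the heading is usable."""
    import_keys = [k.strip() for k in heading
                   if not (k.strip().startswith('ignore') or k.strip() == 'Error')]
    unknown = set(import_keys).difference(set(schema_keys))
    if unknown and unknown != set(['id']):
        return 'not ignorable key(s): "%s" found in heading' % ", ".join(unknown)
    missing = set(required_keys).difference(set(import_keys))
    if missing:
        return 'required key(s): "%s" not found in heading' % ", ".join(missing)
    return None

assert check_heading(['id', 'matric_no', 'ignore_this'],
                     ['id', 'matric_no', 'level'],
                     ['matric_no']) is None
# --- end of sketch ---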
import_keys = [k.strip() for k in attrs if not (k.strip().startswith('ignore') or k.strip() == 'Error')] diff2schema = set(import_keys).difference(set(schema.keys())) diff2layout = set(import_keys).difference(set(layout.keys())) if diff2schema and diff2schema != set(['id',]): msg = 'not ignorable key(s): "%s" found in heading' % ", ".join(diff2schema) return msg if mode in ("create","edit"): required_keys = [layout.getIdUnprefixed(id) for id,widget in layout.objectItems() if widget.is_required] if not set(required_keys).issubset(set(import_keys)): diff2import = set(required_keys).difference(set(import_keys)) msg = 'required key(s): "%s" not found in heading' % ", ".join(diff2import) return msg # # not_imported # not_imported_keys = import_keys + ["Error",] not_imported_file = open(not_imported_fn,"w") not_imported_csv_writer = csv.DictWriter(not_imported_file, not_imported_keys, extrasaction='ignore') mapping = dict((k,k) for k in not_imported_keys) not_imported_csv_writer.writerow(mapping) not_imported = [] # # imported # imported_keys = import_keys[:] if 'id' not in imported_keys: imported_keys.insert(0,"id") imported_file = open(imported_fn,"w") imported_csv_writer = csv.DictWriter(imported_file, imported_keys, extrasaction='ignore') mapping = dict((k,k) for k in imported_keys) imported_csv_writer.writerow(mapping) imported = [] # # pending # pending_records = {} imports_pending = 0 total_pending_imported = 0 total_pending = 0 format = ','.join(['"%%(%s)s"' % fn for fn in import_keys]) format_error = format + ',"%(Error)s"' #format = '"%(id)s",'+ format imported = [] if name in ('verdict','course_result',): #pending_keys = imported_keys[:] sname = "%s_pending" % iname pending_schema = stool._getOb(sname) if schema is None: em = 'No such pending_schema %s' % sname logger.error('No such schema %s' % sname) return em pending_keys = pending_schema.keys() pending_keys += "Error", if 'id' not in pending_keys: pending_keys.insert(0,'id') pending_records = {} if os.path.exists(pending_fn): pending_imports_file = open(pending_fn) pending_imports = csv.DictReader(pending_imports_file, fieldnames=pending_keys) if pending_imports: pending_records = dict((row['id'],row) for row in pending_imports if row['id'] != 'id') pending_imports_file.close() new_imports_pending = 0 pending_imported = [] if pending_only: pending_imported_ids = [] pending_imported = [] for record in pending_records.values(): item = {} for k,v in record.items(): if v: item[k] = v results = importer(item) id = results[0] error = results[1] is_pending = len(results) == 3 if is_pending: continue del item['Error'] msg = ";".join(["%s : %s" % (k,v) for k,v in item.items()]) logger.info("imported from %(pending_fn)s %(msg)s" % vars()) pending_imported.append(item) pending_imported_ids += id, total_pending_imported = len(pending_imported_ids) logger.info("imported %d pending imports" % len(pending_imported_ids)) for id in pending_imported_ids: if id: del pending_records[id] else: logger.info("tried to delete record with empty id") pending_imports_file = open(pending_fn,'w') pending_csv_writer = csv.DictWriter(pending_imports_file, pending_keys, extrasaction='ignore') mapping = dict((k,k) for k in pending_keys) pending_csv_writer.writerow(mapping) if len(pending_imported) > 0: pending_imported_file = open(pending_imported_fn,'w') pending_imported_csv_writer = csv.DictWriter(pending_imported_file, pending_keys, extrasaction='ignore') pending_imported_csv_writer.writerow(dict((k,k) for k in imported_keys)) 
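# --- Standalone sketch (not part of importData): rows that an importer can only
# --- partially process are parked in a pending CSV keyed by id; a later
# --- pending-only run feeds them back through the importer and drops the ones
# --- that now succeed.  The stub importer below is an illustrative assumption.
def retry_pending(pending_records, importer):
    """Re-run importer over pending rows; return the ids imported this time."""
    imported_ids = []
    for record_id, record in list(pending_records.items()):
        item = dict((k, v) for k, v in record.items() if v)
        results = importer(item)
        if len(results) == 3:          # still pending, keep it in the queue
            continue
        imported_ids.append(record_id)
        del pending_records[record_id]
    return imported_ids

def stub_importer(item):               # pretends rows with a score now succeed
    if not item.get('score'):
        return item.get('id'), 'no score yet', item
    return item.get('id'), ''

pending = {'S1': {'id': 'S1', 'score': '55'}, 'S2': {'id': 'S2', 'score': ''}}
assert retry_pending(pending, stub_importer) == ['S1'] and list(pending) == ['S2']
# --- end of sketch ---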
pending_imported_csv_writer.writerows(pending_imported) # if pending_only: items = [] else: try: items = csv.DictReader(open(import_source_fn,"rb")) except: msg = 'Error reading %s.csv' % filename logger.error(msg) return msg not_imported = [] pending_keys = pending_records.keys()[:] for item in items: item = dict((k.strip(),l.strip()) for (k,l) in item.items()) if start: start = False adapters = [MappingStorageAdapter(schema, item)] logger.info('%s starts import from %s.csv in %s mode with schema and layout %s' % (member,filename,mode,iname)) dm = DataModel(item, adapters,context=self) ds = DataStructure(data=item,datamodel=dm) error_string = "" total += 1 error_count = 0 for k in import_keys: if not validators[k](ds,mode=mode): if error_count: error_string += ' ++ ' error_string += "%s: %s" % (k,self.translation_service(ds.getError(k), ds.getErrorMapping(k))) error_count += 1 if error_string: item['Error'] = error_string not_imported.append(item) total_not_imported += 1 continue temp_item = item.copy() temp_item.update(dm) #id,error = importer(temp_item) results = importer(temp_item) id = results[0] error = results[1] is_pending = len(results) == 3 if is_pending: temp_item = results[2] temp_item['Error'] = error msg = format_error % temp_item #if id not in pending_records.keys(): if id not in pending_keys: temp_item['id'] = id pending_records[id] = temp_item logger.info("%(id)s added to pending %(msg)s" % vars()) pending_keys += id, new_imports_pending += 1 else: logger.info("%(id)s already in pending %(msg)s" % vars()) continue elif error: item['Error'] = error not_imported.append(item) total_not_imported += 1 continue item = temp_item item['id'] = id imported.append(item) tr_count += 1 total_imported += 1 msg = format % item logger.info("%(total_imported)d of %(total)d imported in %(mode)s mode, %(msg)s" % vars()) if total and not total % commit_after: transaction.commit() if len(imported) > 0: imported_csv_writer.writerows(imported) imported = [] if len(not_imported) > 0: not_imported_csv_writer.writerows(not_imported) not_imported = [] if len(pending_records) > 0: pending_csv_writer.writerows(pending_records.values()) total_pending += len(pending_records) pending_records = {} msg = '%d transactions committed\n' % (tr_count) regs = [] logger.info(msg) tr_count = 0 if len(imported) > 0: imported_csv_writer.writerows(imported) if len(not_imported) > 0: not_imported_csv_writer.writerows(not_imported) if len(pending_records) > 0: pending_csv_writer.writerows(pending_records.values()) total_pending += len(pending_records) import_source_fn = os.path.split(import_source_fn)[1] pending_fn = os.path.split(pending_fn)[1] msg = "Finished importing in %(mode)s mode from %(import_source_fn)s: " % vars() msg += "%(total_imported)d imported, %(total_not_imported)d not imported, " % vars() if total_pending: if pending_only: msg += "%(new_imports_pending)d pending added, %(total_pending_imported)d pending imported " % vars() else: msg += "%(new_imports_pending)d pending added " % vars() msg += "(of total %(total)d), " % vars() msg += "%(total_pending)d total pending in %(pending_fn)s " % vars() logger.info(msg) return msg ###)
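# --- Standalone sketch (not part of importData): the heart of the loop above --
# --- every import key is run through its layout validator, the error messages
# --- are joined with ' ++ ', and the row goes either to the imported or to the
# --- not-imported report.  The validators and rows below are illustrative.
def route_rows(rows, validators):
    """Split rows into (imported, not_imported) using per-field validators."""
    imported, not_imported = [], []
    for row in rows:
        errors = []
        for key, validate in validators.items():
            if not validate(row.get(key, '')):
                errors.append('%s: invalid' % key)
        if errors:
            row = dict(row, Error=' ++ '.join(errors))
            not_imported.append(row)
        else:
            imported.append(row)
    return imported, not_imported

validators = {'matric_no': lambda v: bool(v),
              'level': lambda v: v.isdigit()}
ok, failed = route_rows([{'matric_no': 'U1', 'level': '200'},
                         {'matric_no': '', 'level': 'abc'}], validators)
assert len(ok) == 1 and 'Error' in failed[0]
# --- end of sketch ---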