Odyssey
cudrop_test.py
1 # !/usr/bin/env python
2 """Main module to migrate admin data.
3 
4 Usage: export ODY_MIGR_SECRET_KEY=n3c3s1t@m0$Mud@rT0d@$L@$C0$@$! && \
5  python3 app/tools/bin/cudrop_test.py
6 """
7 
8 import logging
9 import unittest
10 import subprocess
11 import shlex
12 from datetime import datetime, timedelta
13 import time
14 
15 # import only necessary variables for settings
16 from ody_migr_config import (INSERT,
17  SELECT_RETURN,
18  DELETE,
19  TEST_USERNAME,
20  TEST_PASSWORD,
21  TESTING_SERVER)
22 
23 from ody_migr_transaction import PgTransaction
24 from ody_migr_mmth_endpoint import MammothMigration
25 from psycopg2.extras import DictCursor
26 from ody_migr_utils import format_error_permission
27 import ody_migr_db_handler as pg_handler
28 
# Module-level logger; handlers/levels are configured by the entry point.
LOGGER = logging.getLogger(__name__)

# verbose flag: set to 1 to print per-record details while tests run
VERBOSE = 0
# serial number offset, to avoid duplicate constraint violation in tables
SERIAL_OFFSET = 10000
# number of new records to be added during test
NUM_OF_NEW_RECORDS = 2
# limit to select number of records
LIMIT_SELECT = 6
# wait time to let database be ready (seconds)
FIXTURE_READY_TIME = 5

# today's date as YYYYMMDD, captured once at import time
TEST_DATE_NOW = datetime.now().strftime("%Y%m%d")
# test date yearmo (used for culogtrack), YYYYMM
TEST_DATE_NOW_YEARMO = datetime.now().strftime("%Y%m")

# cleanup scripts location
PERL_SCRIPTS_ROOT = "/opt/odyssey/tools/bin"

# tables to cleanup script map dictionary
# NOTE(review): lower_snake_case name kept for compatibility with existing
# references; a new module constant would conventionally be UPPER_SNAKE_CASE.
table_to_perlscript_map = {
    "curdcstatus": "cudropcurdcstatus.pl",
    "cu_vendorlog": "cudropcu_vendorlog.pl",
    "cu_vendorcache": "cudropcu_vendorcache.pl",
    "lnappuserresponse": "cudroplnapp.pl",
    "culogtrack": "cudropculogtrack.pl",
    "history": "cudrophist.pl",
    "cuaudituser": "cudropcuaudituser.pl",
    "cuauditadmin": "cudropcuauditadmin.pl",
    "trans_hdr_dtl": "cudropcutrans.pl",
    "cuuser": "cudropcuusers.pl",
}

# straightforward tables (each has its own entry in table_to_perlscript_map)
MISC_TABLES = [
    "curdcstatus",
    "cu_vendorlog",
    "cu_vendorcache",
    "lnappuserresponse",
    "culogtrack"
]

# history tables (both cleaned by the single cudrophist.pl script)
HISTORY_TABLES = [
    "scrubcuaccounthistory",
    "scrubculoanhistory"
]
77 
78 
def run_cmd_subprocess(_cmd):
    """Run *_cmd* as a sub-process and block until it completes.

    Args:
        _cmd: command to run as a sub-process — either a sequence of
            arguments or a whitespace separated string, in which case it
            is split using shell-like syntax (shlex).

    Raises:
        SystemExit: when the command cannot be launched (IOError/OSError,
            e.g. missing binary or permission problem); the underlying
            error is logged before exiting.
    """
    try:
        if not isinstance(_cmd, list):
            _cmd = shlex.split(_cmd)
        # shell=False + argument list avoids shell-injection issues;
        # subprocess.run waits for completion (replaces Popen().wait()).
        # check=False preserves the original behavior of ignoring the
        # command's exit status.
        subprocess.run(_cmd, shell=False, check=False)
    except (IOError, OSError) as e:
        perm_err_msg = format_error_permission(e, " ".join(_cmd))
        LOGGER.error(perm_err_msg)
        raise SystemExit()
98 
99 
def fixture_load():
    """Populate the test tables with fixture content.

    Misc-table rows are fetched from the Mammoth endpoint; trans hdr/dtl
    rows are synthesized locally with processed dates on both sides of
    the one-year retention boundary.
    """
    cu_prefix = "scrubcu"
    hdr_table = "{}transhdr".format(cu_prefix)
    dtl_table = "{}transdtl".format(cu_prefix)

    def _yyyymmdd(days_back=0):
        # Today minus *days_back* days, formatted as YYYYMMDD.
        return (datetime.now() - timedelta(days=days_back)).strftime("%Y%m%d")

    with pg_handler.PGSession() as conn:
        with conn.cursor() as cur:
            txn = PgTransaction(conn, cur)

            # Pull misc-table content from the Mammoth endpoint.
            migration = MammothMigration(
                cu_prefix,
                "misctables",
                TESTING_SERVER,
                TEST_USERNAME,
                TEST_PASSWORD,
                endpoint_script="hcuadm/mOdysseyMigrMiscTables.prg"
            )
            for tbl, records in migration.run()["data"].items():
                if tbl == "curdcstatus":
                    for record in records:
                        record["user_id"] = 100
                txn(INSERT, tbl, collection=records)

            # hdr rows: ids 1-2 are past the retention window (expected to
            # be deleted by the cleanup script), ids 3-4 must survive.
            hdr_rows = [
                {"id": 1, "processed_date": _yyyymmdd(367)},
                {"id": 2, "processed_date": _yyyymmdd(500)},
                {"id": 3, "processed_date": _yyyymmdd(365)},
                {"id": 4, "processed_date": _yyyymmdd()},
            ]

            # dtl rows follow their hdr parent: 1-3 deleted, 4-5 kept.
            dtl_rows = [
                {"id": dtl_id, "transhdr_id": hdr_id}
                for dtl_id, hdr_id in ((1, 1), (2, 2), (3, 2), (4, 3), (5, 4))
            ]

            txn(INSERT, hdr_table, collection=hdr_rows)
            txn(INSERT, dtl_table, collection=dtl_rows)

            txn.commit()
176 
177 
def fixture_cleanup():
    """Remove all fixture rows from the tables used by this suite."""
    cu_prefix = "scrubcu"
    # Deletion order mirrors fixture_load: misc + history tables first,
    # then the <cu>transhdr and <cu>transdtl tables.
    targets = MISC_TABLES + HISTORY_TABLES + [
        "{}transhdr".format(cu_prefix),
        "{}transdtl".format(cu_prefix),
    ]
    with pg_handler.PGSession() as conn:
        with conn.cursor() as cur:
            txn = PgTransaction(conn, cur)
            for tbl in targets:
                txn(DELETE, tbl)
            txn.commit()
205 
206 
class TestDataCleanupScripts(unittest.TestCase):
    """Test suite for database cleanup scripts.

    Every test follows the same pattern: start from a fresh fixture
    (setUp), add a few records that must survive, run the relevant
    cudrop*.pl cleanup script as a sub-process, then assert on the
    surviving row counts.
    """

    def setUp(self):
        """Start each test from a clean, freshly loaded fixture."""
        fixture_cleanup()
        fixture_load()

    def test_misc_counts(self):
        """Validate expected counts for following tables:

        - curdcstatus
        - cu_vendorlog
        - cu_vendorcache
        - lnappuserresponse
        - culogtrack
        """
        with pg_handler.PGSession() as conn:
            with conn.cursor() as cur:
                select_transaction = PgTransaction(conn, cur)

                curdcstatus_count = select_transaction(
                    SELECT_RETURN,
                    "curdcstatus",
                    return_only_count=True
                )
                self.assertEqual(curdcstatus_count, LIMIT_SELECT)

                cuvendorlog_count = select_transaction(
                    SELECT_RETURN,
                    "cu_vendorlog",
                    return_only_count=True
                )
                self.assertEqual(cuvendorlog_count, LIMIT_SELECT)

                cuvendorcache_count = select_transaction(
                    SELECT_RETURN,
                    "cu_vendorcache",
                    return_only_count=True
                )
                # presumably the upstream source holds 23 cache rows while
                # the fixture is capped at LIMIT_SELECT — TODO confirm
                self.assertEqual(cuvendorcache_count, min(LIMIT_SELECT, 23))

                lnappuserresponse_count = select_transaction(
                    SELECT_RETURN,
                    "lnappuserresponse",
                    return_only_count=True
                )
                self.assertEqual(lnappuserresponse_count,
                                 min(LIMIT_SELECT, 44))

                culogtrack_count = select_transaction(
                    SELECT_RETURN,
                    "culogtrack",
                    return_only_count=True
                )
                self.assertEqual(culogtrack_count, LIMIT_SELECT)

    def test_drop_cu_hist(self):
        """Test cleanupscripts for history tables:

        - scrubculoanhistory
        - scrubcuaccounthistory

        Cleanup Scripts:
        - cudrophist.pl
        """
        with pg_handler.PGSession() as conn:
            with conn.cursor(cursor_factory=DictCursor) as dict_cur:
                hist_transaction = PgTransaction(conn, dict_cur)
                # pre-cleanup
                for tbl_account, tbl_loan in [("scrubcuaccounthistory",
                                               "scrubculoanhistory")]:

                    # 1. get the initial records from setUp
                    initial_records_loan = hist_transaction(
                        SELECT_RETURN,
                        tbl_loan
                    )

                    initial_records_account = hist_transaction(
                        SELECT_RETURN,
                        tbl_account
                    )

                    # unittest assertions instead of bare assert so they
                    # are not stripped when running under python -O
                    self.assertGreaterEqual(len(initial_records_account),
                                            NUM_OF_NEW_RECORDS)
                    self.assertGreaterEqual(len(initial_records_loan),
                                            NUM_OF_NEW_RECORDS)

                    if VERBOSE:
                        print("\ninitial records [{}]\n".format("history"))
                        for rec in initial_records_loan +\
                                initial_records_account:
                            print(rec["date"])

                    # 2. add NUM_OF_NEW_RECORDS with today's updatedate
                    # (these NUM_OF_NEW_RECORDS records must not be scraped
                    # by the db cleanup cronjob cudrophist.pl)
                    latest_records_account = []
                    latest_records_loan = []

                    for i, record in enumerate(initial_records_account[
                            :NUM_OF_NEW_RECORDS]):
                        latest_rec = record.copy()
                        latest_rec["date"] = TEST_DATE_NOW

                        latest_rec["accountnumber"] = int(
                            latest_rec["accountnumber"]) + SERIAL_OFFSET + i

                        latest_rec["tracenumber"] = int(
                            latest_rec["tracenumber"]) + SERIAL_OFFSET + i

                        latest_rec["certnumber"] += SERIAL_OFFSET + i

                        latest_records_account.append(latest_rec)

                    # BUG FIX: this loop previously reused the stale index
                    # *i* left over from the account loop above, so every
                    # loan record received the same serial offset
                    # (duplicate-key risk); enumerate gives each record
                    # its own offset.
                    for i, record in enumerate(initial_records_loan[
                            :NUM_OF_NEW_RECORDS]):
                        latest_rec = record.copy()

                        latest_rec["date"] = TEST_DATE_NOW

                        latest_rec["accountnumber"] = int(
                            latest_rec["accountnumber"]) + SERIAL_OFFSET + i

                        latest_rec["tracenumber"] = int(
                            latest_rec["tracenumber"]) + SERIAL_OFFSET + i

                        latest_records_loan.append(latest_rec)

                    if VERBOSE:
                        print(("\nnewly added records with "
                               "today's date [{}]\n").format("history"))
                        for rec in latest_records_loan +\
                                latest_records_account:
                            print(rec["date"])

                    hist_transaction(
                        INSERT,
                        tbl_account,
                        collection=latest_records_account
                    )

                    hist_transaction(
                        INSERT,
                        tbl_loan,
                        collection=latest_records_loan
                    )

                    loan_records_after_insertion = hist_transaction(
                        SELECT_RETURN,
                        tbl_loan
                    )

                    account_records_after_insertion = hist_transaction(
                        SELECT_RETURN,
                        tbl_account
                    )

                    self.assertEqual(
                        len(loan_records_after_insertion),
                        len(initial_records_loan) + NUM_OF_NEW_RECORDS)

                    self.assertEqual(
                        len(account_records_after_insertion),
                        len(initial_records_account) + NUM_OF_NEW_RECORDS)

                # 3. execute cleanup script (outside the for loop, we only
                # run this once)
                run_cmd_subprocess("{}/{} SCRUBCU".format(
                    PERL_SCRIPTS_ROOT,
                    table_to_perlscript_map["history"]))

                # 4. post cleanup
                for tbl_account, tbl_loan in [("scrubcuaccounthistory",
                                               "scrubculoanhistory")]:
                    loan_records_after_cleanup = hist_transaction(
                        SELECT_RETURN,
                        tbl_loan
                    )

                    account_records_after_cleanup = hist_transaction(
                        SELECT_RETURN,
                        tbl_account
                    )

                    # only the freshly dated records must survive
                    self.assertEqual(len(loan_records_after_cleanup),
                                     NUM_OF_NEW_RECORDS)
                    self.assertEqual(len(account_records_after_cleanup),
                                     NUM_OF_NEW_RECORDS)

                    if VERBOSE:
                        print(
                            "\nrecords after cleanup [{}]\n".format(
                                "history"))
                        for rec in loan_records_after_cleanup +\
                                account_records_after_cleanup:
                            print(rec["date"])

    def test_drop_cutrans(self):
        # NOTE(review): this `def` line was missing in the reviewed file,
        # which silently fused the docstring and body below into
        # test_drop_cu_hist; restored as its own test method.
        """Test cleanupscripts for trans{hdr,dtl} tables:

        - scrubcutranshdr
        - scrubcutransdtl

        Cleanup Scripts:
        - cudropcutrans.pl
        """
        cu = "scrubcu"
        hdr_tablename = "{}transhdr".format(cu)
        dtl_tablename = "{}transdtl".format(cu)
        with pg_handler.PGSession() as conn:
            with conn.cursor(cursor_factory=DictCursor) as dict_cur:
                trans_transaction = PgTransaction(conn, dict_cur)

                # fixture_load inserts 4 hdr rows (2 expired) and 5 dtl
                # rows (3 attached to the expired hdr rows)
                initial_hdr_records_count = trans_transaction(
                    SELECT_RETURN,
                    hdr_tablename,
                    return_only_count=True
                )
                self.assertEqual(initial_hdr_records_count, 4)

                initial_dtl_records_count = trans_transaction(
                    SELECT_RETURN,
                    dtl_tablename,
                    return_only_count=True
                )
                self.assertEqual(initial_dtl_records_count, 5)

                # execute cleanup script
                run_cmd_subprocess("{}/{} SCRUBCU".format(
                    PERL_SCRIPTS_ROOT,
                    table_to_perlscript_map["trans_hdr_dtl"]))

                # expired hdr rows and their dtl children must be gone
                hdr_records_count_after_cleanup = trans_transaction(
                    SELECT_RETURN,
                    hdr_tablename,
                    return_only_count=True
                )
                self.assertEqual(hdr_records_count_after_cleanup, 2)

                dtl_records_count_after_cleanup = trans_transaction(
                    SELECT_RETURN,
                    dtl_tablename,
                    return_only_count=True
                )
                self.assertEqual(dtl_records_count_after_cleanup, 2)

    def test_drop_cu_misc(self):
        """Validate expected counts for following tables:

        - curdcstatus
        - cu_vendorlog
        - cu_vendorcache
        - lnappuserresponse
        - culogtrack

        Cleanup Scripts:
        - cudropcurdcstatus.pl
        - cudropcu_vendorlog.pl
        - cudropcu_vendorcache.pl
        - cudroplnapp.pl
        - cudropculogtrack.pl
        """
        with pg_handler.PGSession() as conn:
            with conn.cursor(cursor_factory=DictCursor) as dict_cur:
                curdcstatus_transaction = PgTransaction(conn, dict_cur)
                for table_name in MISC_TABLES:
                    # 1. get the initial records from setUp
                    initial_records = curdcstatus_transaction(
                        SELECT_RETURN,
                        table_name
                    )

                    if VERBOSE:
                        print("\ninitial records [{}]\n".format(table_name))
                        for rec in initial_records:
                            if table_name == "lnappuserresponse":
                                print(rec["respsubmiton"], rec["respstarton"])
                            else:
                                print(rec)

                    # 2. add NUM_OF_NEW_RECORDS with today's updatedate
                    # (these NUM_OF_NEW_RECORDS records must not be scraped
                    # by the db cleanup cronjob cudropcurdcstatus.pl)
                    latest_records = []
                    self.assertGreaterEqual(len(initial_records),
                                            NUM_OF_NEW_RECORDS)

                    for i, record in enumerate(initial_records[
                            :NUM_OF_NEW_RECORDS]):
                        latest_rec = record.copy()

                        # each table marks "recent" via different
                        # date/key columns
                        if table_name == "curdcstatus":
                            latest_rec["lastupdate"] = TEST_DATE_NOW
                            latest_rec["depositid"] += SERIAL_OFFSET + i

                        elif table_name == "lnappuserresponse":
                            latest_rec["respid"] += SERIAL_OFFSET + i
                            latest_rec["respsubmiton"] = TEST_DATE_NOW
                            latest_rec["respstarton"] = TEST_DATE_NOW

                        elif table_name == "culogtrack":
                            latest_rec["accountnumber"] = int(
                                latest_rec["accountnumber"])\
                                + SERIAL_OFFSET\
                                + i
                            latest_rec["yearmo"] = TEST_DATE_NOW_YEARMO

                        elif table_name in ["cu_vendorlog", "cu_vendorcache"]:
                            latest_rec["entry"] = TEST_DATE_NOW

                        latest_records.append(latest_rec)

                    if VERBOSE:
                        print(("\nnewly added records with "
                               "today's date [{}]\n").format(table_name))
                        for rec in latest_records:
                            if table_name == "lnappuserresponse":
                                print(rec["respsubmiton"], rec["respstarton"])
                            else:
                                print(rec)

                    curdcstatus_transaction(
                        INSERT,
                        table_name,
                        collection=latest_records
                    )
                    curdcstatus_transaction.commit()

                    records_after_insertion = curdcstatus_transaction(
                        SELECT_RETURN,
                        table_name
                    )

                    self.assertEqual(
                        len(records_after_insertion),
                        len(initial_records) + NUM_OF_NEW_RECORDS)

                    # 3. execute cleanup script
                    run_cmd_subprocess("{}/{}".format(
                        PERL_SCRIPTS_ROOT,
                        table_to_perlscript_map[table_name]))

                    # 4. post cleanup — only the freshly dated records
                    # must survive
                    records_after_cleanup = curdcstatus_transaction(
                        SELECT_RETURN,
                        table_name
                    )

                    self.assertEqual(len(records_after_cleanup),
                                     NUM_OF_NEW_RECORDS)

                    if VERBOSE:
                        print(
                            "\nrecords after cleanup [{}]\n".format(
                                table_name))
                        for rec in records_after_cleanup:
                            if table_name == "lnappuserresponse":
                                print(rec["respsubmiton"], rec["respstarton"])
                            else:
                                print(rec)

    def test_cuauditadmin(self):
        """Test cuauditadmin cleanup script

        Cleanup Script:
        - cudropcuauditadmin.pl
        """
        # refresh admin data through the migration executor before testing
        run_cmd_subprocess("{} {} {} clean SCRUBCU admin {} {}".format(
            "/usr/bin/python3",
            "/opt/odyssey/tools/bin/ody_migr_executor.py",
            TESTING_SERVER,
            TEST_USERNAME,
            TEST_PASSWORD
        ))

        run_cmd_subprocess("{} {} {} migrate SCRUBCU admin {} {}".format(
            "/usr/bin/python3",
            "/opt/odyssey/tools/bin/ody_migr_executor.py",
            TESTING_SERVER,
            TEST_USERNAME,
            TEST_PASSWORD
        ))

        # give the database a moment to settle after the migration
        time.sleep(FIXTURE_READY_TIME)

        with pg_handler.PGSession() as conn:
            with conn.cursor(cursor_factory=DictCursor) as dict_cur:
                cuauditadmin_transaction = PgTransaction(conn, dict_cur)

                dict_cur.execute("SELECT * from cuauditadmin where "
                                 "auditdate<'20170801' limit {}".format(
                                     LIMIT_SELECT))

                to_be_deleted_records = [dict(rec)
                                         for rec in dict_cur.fetchall()]

                dict_cur.execute("delete from cuauditadmin")
                cuauditadmin_transaction.commit()

                # insert fixture data (to be deleted + recent data); the
                # user_name suffix keeps the recent copies unique
                recent_records = []
                for i, rec in enumerate(to_be_deleted_records):
                    rec_recent = rec.copy()
                    rec_recent["auditdate"] = TEST_DATE_NOW
                    rec_recent["user_name"] = rec_recent["user_name"] + str(i)
                    recent_records.append(rec_recent)

                cuauditadmin_transaction(
                    INSERT,
                    "cuauditadmin",
                    collection=to_be_deleted_records + recent_records
                )
                cuauditadmin_transaction.commit()

                run_cmd_subprocess("{}/{}".format(
                    PERL_SCRIPTS_ROOT,
                    table_to_perlscript_map["cuauditadmin"]))

                # only the recent records must survive the cleanup
                after_cleanup_count = cuauditadmin_transaction(
                    SELECT_RETURN,
                    "cuauditadmin",
                    return_only_count=True
                )

                self.assertEqual(after_cleanup_count, len(recent_records))

    def test_cuuser(self):
        """Verify deletion of <cu>user (only) table

        Cleanup Scripts:
        - cudropcuusers.pl
        """
        run_cmd_subprocess("{} {} {} clean SCRUBCU memdata {} {}".format(
            "/usr/bin/python3",
            "/opt/odyssey/tools/bin/ody_migr_executor.py",
            TESTING_SERVER,
            TEST_USERNAME,
            TEST_PASSWORD
        ))
        run_cmd_subprocess("{} {} {} migrate SCRUBCU memdata {} {}".format(
            "/usr/bin/python3",
            "/opt/odyssey/tools/bin/ody_migr_executor.py",
            TESTING_SERVER,
            TEST_USERNAME,
            TEST_PASSWORD
        ))

        time.sleep(FIXTURE_READY_TIME)

        with pg_handler.PGSession() as conn:
            with conn.cursor(cursor_factory=DictCursor) as dict_cur:
                cuuser_transaction = PgTransaction(conn, dict_cur)

                # 1. prepare fixture to delete
                dict_cur.execute("SELECT * from scrubcuuser "
                                 "limit {}".format(LIMIT_SELECT))

                initial_records = [dict(rec)
                                   for rec in dict_cur.fetchall()]

                # make sure we have a few records in scrubcuuser already
                if initial_records:

                    # empty the table
                    cuuser_transaction(
                        DELETE,
                        "scrubcuuser"
                    )
                    cuuser_transaction.commit()

                    new_records = []
                    # add one record that should not be deleted (today's
                    # pwchange + non-NULL password)
                    record_not_to_delete = initial_records[0].copy()
                    record_not_to_delete["pwchange"] = TEST_DATE_NOW
                    record_not_to_delete["passwd"] = 's0m3^@l!dPassW0rD'
                    # user_id is handled by auto-increment
                    record_not_to_delete.pop("user_id", None)
                    record_not_to_delete["user_name"] += str(SERIAL_OFFSET)
                    new_records.append(record_not_to_delete)

                    # prepare records eligible for deletion (stale pwchange
                    # and the NULL PASSWORD marker)
                    for i, rec in enumerate(initial_records):
                        updated_rec = rec.copy()
                        updated_rec["pwchange"] = '20160801'
                        updated_rec["passwd"] = 'NULL PASSWORD'
                        # handled by auto-increment
                        updated_rec.pop("user_id", None)
                        updated_rec["user_name"] += str(
                            SERIAL_OFFSET + i * 5 + 1)
                        new_records.append(updated_rec)

                    cuuser_transaction(
                        INSERT,
                        "scrubcuuser",
                        collection=new_records
                    )
                    cuuser_transaction.commit()

                    run_cmd_subprocess("{}/{} SCRUBCU".format(
                        PERL_SCRIPTS_ROOT,
                        table_to_perlscript_map["cuuser"]))

                    # only the single "not to delete" record must survive
                    count_after_cleanup = cuuser_transaction(
                        SELECT_RETURN,
                        "scrubcuuser",
                        return_only_count=True
                    )
                    self.assertEqual(count_after_cleanup, 1)

    def test_audituser(self):
        """Test <cu>audituser cleanup script

        Cleanup Script:
        - cudropcuaudituser.pl
        """
        run_cmd_subprocess("{} {} {} clean SCRUBCU memdata {} {}".format(
            "/usr/bin/python3",
            "/opt/odyssey/tools/bin/ody_migr_executor.py",
            TESTING_SERVER,
            TEST_USERNAME,
            TEST_PASSWORD
        ))

        run_cmd_subprocess("{} {} {} migrate SCRUBCU memdata {} {}".format(
            "/usr/bin/python3",
            "/opt/odyssey/tools/bin/ody_migr_executor.py",
            TESTING_SERVER,
            TEST_USERNAME,
            TEST_PASSWORD
        ))

        time.sleep(FIXTURE_READY_TIME)

        with pg_handler.PGSession() as conn:
            with conn.cursor(cursor_factory=DictCursor) as dict_cur:
                # renamed from cuauditadmin_transaction: this test works
                # on scrubcuaudituser, not cuauditadmin
                audituser_transaction = PgTransaction(conn, dict_cur)

                dict_cur.execute("SELECT * from scrubcuaudituser "
                                 "limit {}".format(LIMIT_SELECT))

                initial_records = [dict(rec)
                                   for rec in dict_cur.fetchall()]

                audituser_transaction(
                    DELETE,
                    "scrubcuaudituser"
                )
                audituser_transaction.commit()

                # insert fixture pairs per initial record: one recent row
                # (kept) and one stale row (deleted); distinct
                # accountnumber suffixes ('a', 'b', ...) avoid collisions
                recent_records = []
                for i, rec in enumerate(initial_records):
                    rec_recent = rec.copy()
                    rec_recent["auditdate"] = TEST_DATE_NOW
                    rec_recent["accountnumber"] += chr(i * 2 + 97)
                    recent_records.append(rec_recent)

                    rec_recent_del = rec.copy()
                    rec_recent_del["auditdate"] = '20140801'
                    rec_recent_del["accountnumber"] += chr(i * 2 + 1 + 97)
                    recent_records.append(rec_recent_del)

                audituser_transaction(
                    INSERT,
                    "scrubcuaudituser",
                    collection=recent_records
                )
                audituser_transaction.commit()

                run_cmd_subprocess("{}/{} SCRUBCU".format(
                    PERL_SCRIPTS_ROOT,
                    table_to_perlscript_map["cuaudituser"]))

                after_cleanup_count = audituser_transaction(
                    SELECT_RETURN,
                    "scrubcuaudituser",
                    return_only_count=True
                )
                # exactly half of the inserted rows (the recent ones) must
                # survive; integer division keeps the comparison int/int
                self.assertEqual(after_cleanup_count,
                                 len(recent_records) // 2)

    def tearDown(self):
        """Remove fixture rows after each test."""
        fixture_cleanup()
793 
794 
# Run the suite with unittest's CLI runner when executed directly.
if __name__ == "__main__":
    unittest.main()