23 """
24 This module executes pickled jobs on the cluster.
25
26 @author: Christian Widmer
27 @author: Cheng Soon Ong
28 @author: Dan Blanchard (dblanchard@ets.org)
29 """
30
31 from __future__ import absolute_import, print_function, unicode_literals
32
33 import argparse
34 import os
35 import sys
36
37 from redis import StrictRedis
38
39 from gridmap.data import clean_path, zload_db, zsave_db
40 from gridmap.job import REDIS_DB, REDIS_PORT
41
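# This module is meant to be run as a script by gridmap's job submission
# machinery rather than imported directly. A rough sketch of the expected
# invocation (the exact entry-point name depends on how gridmap is installed;
# this is an illustrative example, not the canonical command):
#
#     python runner.py <uniq_id> <job_number> <module_dir> <temp_dir> <redis_host>
#
# where the five positional arguments match those defined in _main() below.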

def _run_job(uniq_id, job_num, temp_dir, redis_host):
    """
    Execute the pickled job and produce pickled output.

    @param uniq_id: The unique suffix for the tables corresponding to this job
                    in the database.
    @type uniq_id: C{basestring}
    @param job_num: The index for this job's content in the job and output
                    tables.
    @type job_num: C{int}
    @param temp_dir: Local temporary directory for storing output for an
                     individual job.
    @type temp_dir: C{basestring}
    @param redis_host: Hostname of the database to connect to get the job data.
    @type redis_host: C{basestring}
    """
    redis_server = StrictRedis(host=redis_host, port=REDIS_PORT, db=REDIS_DB)
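
    # Job payloads live in the 'job<uniq_id>' table and results are written
    # back to the 'output<uniq_id>' table; job_num selects this job's entry in
    # each.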

    print("Loading job...", end="", file=sys.stderr)
    sys.stderr.flush()
    try:
        job = zload_db(redis_server, 'job{0}'.format(uniq_id), job_num)
    except Exception as detail:
        job = None
        print("FAILED", file=sys.stderr)

        print("Writing exception to database for job {0}...".format(job_num),
              end="", file=sys.stderr)
        sys.stderr.flush()
        zsave_db(detail, redis_server, 'output{0}'.format(uniq_id), job_num)
        print("done", file=sys.stderr)
    else:
        print("done", file=sys.stderr)

        print("Running job...", end="", file=sys.stderr)
        sys.stderr.flush()
        job.execute()
        print("done", file=sys.stderr)

        print("Writing output to database for job {0}...".format(job_num),
              end="", file=sys.stderr)
        sys.stderr.flush()
        zsave_db(job.ret, redis_server, 'output{0}'.format(uniq_id), job_num)
        print("done", file=sys.stderr)

        # Remove the scheduler's stdout/stderr log files for this job if it
        # requested cleanup.
        if job.cleanup:
            log_stdout_fn = os.path.join(temp_dir, '{0}.o{1}'.format(job.name,
                                                                     job.jobid))
            log_stderr_fn = os.path.join(temp_dir, '{0}.e{1}'.format(job.name,
                                                                     job.jobid))
            try:
                os.remove(log_stdout_fn)
                os.remove(log_stderr_fn)
            except OSError:
                pass


def _main():
    """
    Parse the command line inputs and call _run_job.
    """
    parser = argparse.ArgumentParser(
        description="This wrapper script will run a pickled Python function "
                    "on some pickled data in a Redis database, and write the "
                    "results back to the database. You almost never want to "
                    "run this yourself.",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        conflict_handler='resolve')
    parser.add_argument('uniq_id',
                        help='The unique suffix for the tables corresponding '
                             'to this job in the database.')
    parser.add_argument('job_number',
                        help='Which job number should be run. Dictates which '
                             'input data is read from the database and where '
                             'output data is stored.',
                        type=int)
    parser.add_argument('module_dir',
                        help='Directory that contains the module containing '
                             'the pickled function. This will get added to '
                             'PYTHONPATH temporarily.')
    parser.add_argument('temp_dir',
                        help='Directory that temporary output will be stored '
                             'in.')
    parser.add_argument('redis_host',
                        help='The hostname of the server where the Redis '
                             'database is.')
    args = parser.parse_args()

    print("Appended {0} to PYTHONPATH".format(args.module_dir), file=sys.stderr)
    sys.path.append(clean_path(args.module_dir))

    _run_job(args.uniq_id, args.job_number, clean_path(args.temp_dir),
             args.redis_host)


if __name__ == "__main__":
    _main()