 import os
 import re
 import subprocess
-import sys
+import sys
 import time
 import watchtower
 import string
 #################################

 DATA_ROOT = '/home/ubuntu/bucket'
-LOCAL_OUTPUT = '/home/ubuntu/local_output'
 QUEUE_URL = os.environ['SQS_QUEUE_URL']
 AWS_BUCKET = os.environ['AWS_BUCKET']
 LOG_GROUP_NAME = os.environ['LOG_GROUP_NAME']
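
Note: all three of these settings come from the environment, so a missing variable surfaces as a bare KeyError at import time. A minimal sketch of failing fast with a clearer message (the require_env helper is hypothetical, not part of this change):

    import os

    def require_env(name):
        # Hypothetical helper: exit with a readable message instead of a KeyError.
        value = os.environ.get(name)
        if value is None:
            raise SystemExit('Missing required environment variable: ' + name)
        return value

    QUEUE_URL = require_env('SQS_QUEUE_URL')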
@@ -33,7 +32,7 @@ class JobQueue():
     def __init__(self, queueURL):
         self.client = boto3.client('sqs')
         self.queueURL = queueURL
-
+
     def readMessage(self):
         response = self.client.receive_message(QueueUrl=self.queueURL, WaitTimeSeconds=20)
         if 'Messages' in response.keys():
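
Note: readMessage relies on SQS long polling; WaitTimeSeconds=20 holds the request open for up to 20 seconds instead of returning an empty response immediately, which cuts down on empty receives. A self-contained sketch of the receive/delete cycle, assuming a QUEUE_URL like the one above (the processing step is a placeholder):

    import json
    import boto3

    client = boto3.client('sqs')
    response = client.receive_message(QueueUrl=QUEUE_URL, WaitTimeSeconds=20)
    for msg in response.get('Messages', []):
        job = json.loads(msg['Body'])  # job payload posted to the queue
        # ... process the job ...
        # Delete only after the work succeeds, so failed jobs become visible again.
        client.delete_message(QueueUrl=QUEUE_URL, ReceiptHandle=msg['ReceiptHandle'])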
@@ -63,7 +62,7 @@ def monitorAndLog(process,logger):
             break
         if output:
             print(output.strip())
-            logger.info(output)
+            logger.info(output)

 def printandlog(text,logger):
     print(text)
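
Note: monitorAndLog follows the standard pattern for streaming a child process's output: read stdout line by line and stop once the pipe is empty and the process has exited. A standalone sketch of the same loop (the function name is illustrative):

    import subprocess

    def stream_output(args, logger):
        # Merge stderr into stdout so one loop sees everything the child prints.
        proc = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        while True:
            line = proc.stdout.readline()
            if not line and proc.poll() is not None:
                break  # pipe drained and child finished
            if line:
                print(line.strip())
                logger.info(line)
        return proc.returncode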
@@ -82,72 +81,69 @@ def stringify_metadata_dict(mdict):
 def runFIJI(message):
     # List the directories in the bucket - this prevents a strange s3fs error
     rootlist = os.listdir(DATA_ROOT)
-    for eachSubDir in rootlist:
-        subDirName = os.path.join(DATA_ROOT,eachSubDir)
-        if os.path.isdir(subDirName):
-            trashvar = os.system('ls '+subDirName)

     # Configure the logs
     logger = logging.getLogger(__name__)

-
+
     # Read the metadata string

-
+
     # Prepare paths and parameters
-    localOut = LOCAL_OUTPUT
+    localOut = 'output'
     remoteOut = message['output_file_location']

     # Start logging now that we have a job we care about
     metadataID = stringify_metadata_dict(message['Metadata'])
-    watchtowerlogger = watchtower.CloudWatchLogHandler(log_group=LOG_GROUP_NAME, stream_name=metadataID, create_log_group=False)
-    logger.addHandler(watchtowerlogger)
-
+    metadata_for_log_name = metadataID.replace('*','.')
+    watchtowerlogger = watchtower.CloudWatchLogHandler(log_group=LOG_GROUP_NAME, stream_name=metadata_for_log_name, create_log_group=False)
+    logger.addHandler(watchtowerlogger)
+
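
Note: the new metadata_for_log_name line is needed because CloudWatch Logs stream names may not contain '*' (or ':'). A slightly more general sanitizer, in case other forbidden characters ever appear in metadata (a sketch; re is already imported at the top of this file):

    import re

    def safe_stream_name(name):
        # CloudWatch log stream names disallow ':' and '*'; map both to '.'
        # and respect the 512-character limit.
        return re.sub(r'[:*]', '.', name)[:512]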
     # Build and run FIJI command
-    cmd = '../../opt/fiji/Fiji.app/ImageJ-linux64 --ij2 --headless --console --run "../../opt/fiji/Fiji.app/plugins/' + SCRIPT_NAME + '" "'
-    cmd += stringify_metadata_dict(message['shared_metadata']) + ', ' + metadataID + '"'
+    cmd = ["Fiji.app/ImageJ-linux64", "--ij2", "--headless", "--console", "--run", os.path.join("Fiji.app/plugins", SCRIPT_NAME)]
+    cmd.append(stringify_metadata_dict(message['shared_metadata']) + ', ' + metadataID)
     print('Running', cmd)
     logger.info(cmd)
-
-    subp = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+
+    subp = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
     monitorAndLog(subp,logger)
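
Note: passing cmd as a list instead of calling cmd.split() is the substantive fix here: split() breaks any argument containing spaces, such as the quoted metadata string the old command appended, while a list delivers each argument to the child intact. A small illustration, assuming a POSIX echo:

    import subprocess

    arg = 'threshold=some value'
    subprocess.call(('echo ' + arg).split())  # child sees ['threshold=some', 'value']
    subprocess.call(['echo', arg])            # child sees 'threshold=some value' intact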
-
+

     # Get the outputs and move them to S3
-
-    # Figure out how many output files there were
+
+    # Figure out how many output files there were - thanks https://stackoverflow.com/a/29769297
     print('Checking output folder size')
-    cmd = "find " + localOut + " -type f | wc -l"
-    logger.info(cmd)
-    subp = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    out, err = subp.communicate()
-    if int(out) >= int(EXPECTED_NUMBER_FILES):
+    filenum = 0
+    for _, _, filenames in os.walk(localOut):
+        filenum += len(filenames)
+    print(localOut, filenum)
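
Note: counting with os.walk also fixes a latent bug in the removed version: Popen(cmd.split()) never honored the '| wc -l' pipe, because pipes are a shell feature and no shell was involved here. The pure-Python equivalent, factored into a helper for clarity (a sketch; the inline loop above does the same thing):

    import os

    def count_files(root):
        # Recursively count regular files under root, like `find root -type f | wc -l`.
        total = 0
        for _dirpath, _dirnames, filenames in os.walk(root):
            total += len(filenames)
        return total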
+    if filenum >= int(EXPECTED_NUMBER_FILES):
         mvtries = 0
         while mvtries < 3:
-            try:
-                printandlog('Move attempt #' + str(mvtries + 1),logger)
-                cmd = 'aws s3 mv ' + localOut + ' s3://' + AWS_BUCKET + '/' + remoteOut + ' --recursive'
-                subp = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-                out,err = subp.communicate()
-                printandlog('== OUT \n' + out, logger)
-                if err == '':
-                    break
-                else:
-                    printandlog('== ERR \n' + err,logger)
-                    mvtries += 1
-            except:
-                printandlog('Move failed',logger)
-                printandlog('== ERR \n' + err,logger)
-                time.sleep(30)
-                mvtries += 1
+            try:
+                printandlog('Move attempt #' + str(mvtries + 1),logger)
+                cmd = 'aws s3 mv ' + localOut + ' s3://' + AWS_BUCKET + '/' + remoteOut + ' --recursive'
+                subp = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+                out,err = subp.communicate()
+                printandlog('== OUT \n' + out, logger)
+                if err == '':
+                    break
+                else:
+                    printandlog('== ERR \n' + err,logger)
+                    mvtries += 1
+            except:
+                printandlog('Move failed',logger)
+                printandlog('== ERR \n' + err,logger)
+                time.sleep(30)
+                mvtries += 1
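
Note: the retry loop shells out to the AWS CLI, retrying up to three times and sleeping 30 seconds after an exception. A hedged alternative sketch using boto3 directly, since S3 has no atomic move (upload each file, then delete the local copy); the function and parameter names here are illustrative:

    import os
    import boto3

    def move_dir_to_s3(local_dir, bucket, prefix):
        # Approximate `aws s3 mv <local_dir> s3://<bucket>/<prefix> --recursive`.
        s3 = boto3.client('s3')
        for dirpath, _dirnames, filenames in os.walk(local_dir):
            for name in filenames:
                path = os.path.join(dirpath, name)
                key = prefix + '/' + os.path.relpath(path, local_dir)
                s3.upload_file(path, bucket, key)
                os.remove(path)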
     if mvtries < 3:
-        printandlog('SUCCESS',logger)
-        logger.removeHandler(watchtowerlogger)
-        return 'SUCCESS'
+        printandlog('SUCCESS',logger)
+        logger.removeHandler(watchtowerlogger)
+        return 'SUCCESS'
     else:
-        printandlog('OUTPUT PROBLEM. Giving up on ' + metadataID,logger)
-        logger.removeHandler(watchtowerlogger)
-        return 'OUTPUT_PROBLEM'
-
+        printandlog('OUTPUT PROBLEM. Only ' + str(filenum) + ' files detected. Giving up on ' + metadataID,logger)
+        logger.removeHandler(watchtowerlogger)
+        return 'OUTPUT_PROBLEM'
+

 #################################
 # MAIN WORKER LOOP
@@ -179,4 +175,3 @@ def main():
 print('Worker started')
 main()
 print('Worker finished')
-