Commit b69dd96

no more image previews in Redis (that was always a bad idea anyway), show Data Store in task view, various debugging
wpbonelli committed Aug 31, 2021
1 parent 13cfd91 commit b69dd96
Showing 14 changed files with 110 additions and 208 deletions.
1 change: 1 addition & 0 deletions README.md
@@ -184,6 +184,7 @@ WORKFLOWS_CACHE=/code/workflows.json
WORKFLOWS_REFRESH_MINUTES=60
TASKS_LOGS=/code/logs
TASKS_TIMEOUT_MULTIPLIER=2
TASKS_STEP_TIME_LIMIT_SECONDS=20
LAUNCHER_SCRIPT_NAME=launch
SQL_ENGINE=django.db.backends.postgresql
SQL_HOST=postgres
2 changes: 2 additions & 0 deletions docker-compose.dev.yml
@@ -37,6 +37,7 @@ services:
- TASKS_LOGS=${TASKS_LOGS}
- TASKS_REFRESH_SECONDS=${TASKS_REFRESH_SECONDS}
- TASKS_CLEANUP_MINUTES=${TASKS_CLEANUP_MINUTES}
- TASKS_STEP_TIME_LIMIT_SECONDS=${TASKS_STEP_TIME_LIMIT_SECONDS}
- SQL_ENGINE=${SQL_ENGINE}
- SQL_HOST=${SQL_HOST}
- SQL_PORT=${SQL_PORT}
@@ -134,6 +135,7 @@ services:
- TASKS_LOGS=${TASKS_LOGS}
- TASKS_REFRESH_SECONDS=${TASKS_REFRESH_SECONDS}
- TASKS_CLEANUP_MINUTES=${TASKS_CLEANUP_MINUTES}
- TASKS_STEP_TIME_LIMIT_SECONDS=${TASKS_STEP_TIME_LIMIT_SECONDS}
- LAUNCHER_SCRIPT_NAME=${LAUNCHER_SCRIPT_NAME}
- SQL_ENGINE=${SQL_ENGINE}
- SQL_HOST=${SQL_HOST}
2 changes: 2 additions & 0 deletions docker-compose.prod.yml
@@ -37,6 +37,7 @@ services:
- TASKS_LOGS=${TASKS_LOGS}
- TASKS_REFRESH_SECONDS=${TASKS_REFRESH_SECONDS}
- TASKS_CLEANUP_MINUTES=${TASKS_CLEANUP_MINUTES}
- TASKS_STEP_TIME_LIMIT_SECONDS=${TASKS_STEP_TIME_LIMIT_SECONDS}
- LAUNCHER_SCRIPT_NAME=${LAUNCHER_SCRIPT_NAME}
- SQL_ENGINE=${SQL_ENGINE}
- SQL_HOST=${SQL_HOST}
@@ -107,6 +108,7 @@ services:
- TASKS_LOGS=${TASKS_LOGS}
- TASKS_REFRESH_SECONDS=${TASKS_REFRESH_SECONDS}
- TASKS_CLEANUP_MINUTES=${TASKS_CLEANUP_MINUTES}
- TASKS_STEP_TIME_LIMIT_SECONDS=${TASKS_STEP_TIME_LIMIT_SECONDS}
- LAUNCHER_SCRIPT_NAME=${LAUNCHER_SCRIPT_NAME}
- SQL_ENGINE=${SQL_ENGINE}
- SQL_HOST=${SQL_HOST}
5 changes: 2 additions & 3 deletions plantit/front_end/src/components/datasets/data-tree.vue
@@ -1185,7 +1185,7 @@
>
<b-col>
<b-img-lazy
v-if="previewsLoaded"
v-if="previewsLoaded && fileIsImage(child)"
:src="getFileURL(child)"
style="width: 3rem; height: 3rem"
></b-img-lazy>
@@ -1208,8 +1208,7 @@
<b-button
id="popover-reactive-1"
:disabled="
!fileIsImage(child.label) &&
!fileIsText(child.label)
!fileIsImage(child.label)
"
:variant="profile.darkMode ? 'outline-light' : 'white'"
class="m-1"
39 changes: 24 additions & 15 deletions plantit/front_end/src/components/tasks/task.vue
@@ -911,7 +911,7 @@
result(s) found
<br />
</b-col>
<b-col
<!--<b-col
md="auto"
align-self="end"
v-if="
@@ -971,7 +971,7 @@
"
></b-img
></b-button>
</b-col>
</b-col>-->
<b-col
md="auto"
align-self="end"
@@ -1748,48 +1748,57 @@
</div>-->
</b-col>
</b-row>

<!--<b-card
<b-card
v-if="
flow.config.output &&
(getRun.state === 6 || getRun.state === 0)
getTask.is_complete &&
getTask.transferred
"
:bg-variant="
profile.darkMode ? 'dark' : 'white'
"
:bg-variant="darkMode ? 'dark' : 'white'"
:footer-bg-variant="
darkMode ? 'dark' : 'white'
profile.darkMode ? 'dark' : 'white'
"
border-variant="default"
:footer-border-variant="
darkMode ? 'dark' : 'white'
profile.darkMode ? 'dark' : 'white'
"
no-body
>
<b-card-header
class="mt-1"
:header-bg-variant="
darkMode ? 'dark' : 'white'
profile.darkMode ? 'dark' : 'white'
"
><h5
:class="
darkMode
profile.darkMode
? 'text-white'
: 'text-dark'
"
>
CyVerse Data Store
Data Store
</h5></b-card-header
>
<b-card-body>
<b-row>
<b-col>
<datatree
:upload="false"
:node="
personalDatasets
"
:upload="true"
:download="true"
:node="userData"
:create="true"
:class="
profile.darkMode
? 'theme-dark'
: 'theme-light'
"
></datatree></b-col
></b-row>
</b-card-body>
</b-card>-->
</b-card>
</b-col>
</b-row>
</div>
4 changes: 2 additions & 2 deletions plantit/front_end/src/components/workflows/workflow.vue
@@ -4439,8 +4439,8 @@ export default {
patterns: [],
names: []
};
config.output.include.patterns = this.outputSelectedPatterns;
config.output.include.names = this.outputSelectedNames;
config.output.include.patterns = Array.from(this.outputSelectedPatterns);
config.output.include.names = Array.from(this.outputSelectedNames);
// config.output.patterns =
// this.outputSelectedPatterns.length > 0
105 changes: 8 additions & 97 deletions plantit/plantit/celery_tasks.py
@@ -277,103 +277,6 @@ def list_task_results(guid: str, auth: dict):
# async_to_sync(push_task_event)(task)
check_cyverse_transfer_completion.s(guid).apply_async(priority=1)

# log_task_orchestrator_status(task, [f"Creating file previews"])
# async_to_sync(push_task_event)(task)

# with ssh:
# with ssh.client.open_sftp() as sftp:
# sftp.chdir(workdir)
# for result in expected:
# name = result['name']
# path = result['path']
# exists = result['exists']

# if not exists: continue
# if name.endswith('txt') or \
# name.endswith('csv') or \
# name.endswith('yml') or \
# name.endswith('yaml') or \
# name.endswith('tsv') or \
# name.endswith('out') or \
# name.endswith('err') or \
# name.endswith('log'):
# logger.info(f"Creating preview for text file: {name}")
# with tempfile.NamedTemporaryFile() as temp_file:
# sftp.get(name, temp_file.name)

# try:
# preview = previews.get_jpeg_preview(temp_file.name, width=1024, height=1024)
# except UnsupportedMimeType:
# redis.set(f"previews/{task.user.username}/{task.guid}/{name}", 'EMPTY')
# logger.info(f"Saved empty file preview to cache: {name}")
# continue

# with open(preview, 'rb') as pf:
# content = pf.read()
# encoded = base64.b64encode(content)
# redis.set(f"previews/{task.user.username}/{task.guid}/{name}", encoded)
# logger.info(f"Saved file preview to cache: {name}")
# elif path.endswith('png'):
# logger.info(f"Creating preview for PNG file: {name}")
# with tempfile.NamedTemporaryFile() as temp_file:
# sftp.get(result['name'], temp_file.name)

# try:
# preview = previews.get_jpeg_preview(temp_file.name, width=1024, height=1024)
# except UnsupportedMimeType:
# redis.set(f"previews/{task.user.username}/{task.guid}/{name}", 'EMPTY')
# logger.info(f"Saved empty preview for PNG file to cache: {name}")
# continue

# with open(preview, 'rb') as pf:
# content = pf.read()
# encoded = base64.b64encode(content)
# redis.set(f"previews/{task.user.username}/{task.guid}/{name}", encoded)
# logger.info(f"Saved file preview to cache: {name}")
# elif path.endswith('jpg') or path.endswith('jpeg'):
# logger.info(f"Creating preview for JPG file: {name}")
# with tempfile.NamedTemporaryFile() as temp_file:
# sftp.get(result['name'], temp_file.name)

# try:
# preview = previews.get_jpeg_preview(temp_file.name, width=1024, height=1024)
# except UnsupportedMimeType:
# redis.set(f"previews/{task.user.username}/{task.guid}/{name}", 'EMPTY')
# logger.info(f"Saved empty preview for JPG file to cache: {name}")
# continue

# with open(preview, 'rb') as pf:
# content = pf.read()
# encoded = base64.b64encode(content)
# redis.set(f"previews/{task.user.username}/{task.guid}/{name}", encoded)
# logger.info(f"Saved JPG file preview to cache: {name}")
# elif path.endswith('czi'):
# logger.info(f"Creating preview for CZI file: {name}")
# with tempfile.NamedTemporaryFile() as temp_file:
# sftp.get(result['name'], temp_file.name)

# image = czifile.imread(temp_file.name)
# image.shape = (image.shape[2], image.shape[3], image.shape[4])
# success, buffer = cv2.imencode(".jpg", image)
# buffer.tofile(temp_file.name)

# try:
# preview = previews.get_jpeg_preview(temp_file.name, width=1024, height=1024)
# except UnsupportedMimeType:
# redis.set(f"previews/{task.user.username}/{task.guid}/{name}", 'EMPTY')
# logger.info(f"Saved empty preview for CZI file to cache: {name}")
# continue

# with open(preview, 'rb') as pf:
# content = pf.read()
# encoded = base64.b64encode(content)
# redis.set(f"previews/{task.user.username}/{task.guid}/{name}", encoded)
# logger.info(f"Saved file preview to cache: {name}")
# elif path.endswith('ply'):
# logger.info(f"Creating preview for PLY file: {name}")
# with tempfile.NamedTemporaryFile() as temp_file:
# sftp.get(result['name'], temp_file.name)

cleanup_delay = int(environ.get('TASKS_CLEANUP_MINUTES')) * 60
cleanup_task.s(guid, auth).apply_async(countdown=cleanup_delay, priority=2)
task.cleanup_time = timezone.now() + timedelta(seconds=cleanup_delay)
@@ -402,13 +305,21 @@ def check_cyverse_transfer_completion(guid: str, iteration: int = 0):
else:
msg = f"Transfer to CyVerse directory {path} completed"
logger.info(msg)
task.transferred = True
task.results_transferred = len(expected)
task.transfer_path = path
task.save()

log_task_orchestrator_status(task, [msg])
async_to_sync(push_task_event)(task)


@app.task()
def check_task_completion(guid: str, auth):
# TODO logic for local vs jobqueue tasks
pass


# @app.task()
# def clean_agent_singularity_cache(agent_name: str):
# try:
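The new check_task_completion stub and the iteration argument on check_cyverse_transfer_completion suggest a polling pattern: a Celery task re-enqueues itself with a countdown until its condition holds or a retry budget runs out. A minimal sketch of that pattern follows, assuming hypothetical MAX_ITERATIONS and POLL_DELAY_SECONDS values that do not appear in this diff:

from celery import Celery

app = Celery('plantit')

MAX_ITERATIONS = 10        # hypothetical retry budget
POLL_DELAY_SECONDS = 30    # hypothetical delay between checks


def condition_met(guid: str) -> bool:
    # placeholder: the real check would inspect the Task record or the remote filesystem
    return False


@app.task()
def poll_until_done(guid: str, iteration: int = 0):
    """Re-enqueue itself until the watched condition holds or the retry budget is spent."""
    if condition_met(guid):
        return
    if iteration + 1 >= MAX_ITERATIONS:
        return  # give up quietly once the budget is exhausted
    # schedule the next check, mirroring the apply_async(countdown=..., priority=...) calls above
    poll_until_done.s(guid, iteration + 1).apply_async(countdown=POLL_DELAY_SECONDS, priority=1)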
3 changes: 2 additions & 1 deletion plantit/plantit/settings.py
@@ -21,6 +21,7 @@
assert 'TASKS_TIMEOUT_MULTIPLIER' in os.environ, f"{missing_variable}: TASKS_TIMEOUT_MULTIPLIER"
assert 'TASKS_REFRESH_SECONDS' in os.environ, f"{missing_variable}: TASKS_REFRESH_SECONDS"
assert 'TASKS_CLEANUP_MINUTES' in os.environ, f"{missing_variable}: TASKS_CLEANUP_MINUTES"
assert 'TASKS_STEP_TIME_LIMIT_SECONDS' in os.environ, f"{missing_variable}: TASKS_STEP_TIME_LIMIT_SECONDS"
assert 'LAUNCHER_SCRIPT_NAME' in os.environ, f"{missing_variable}: LAUNCHER_SCRIPT_NAME"
assert 'DJANGO_API_URL' in os.environ, f"{missing_variable}: DJANGO_API_URL"
assert 'CYVERSE_REDIRECT_URL' in os.environ, f"{missing_variable}: CYVERSE_REDIRECT_URL"
@@ -38,7 +39,6 @@
assert 'AGENTS_HEALTHCHECKS_MINUTES' in os.environ, f"{missing_variable} AGENTS_HEALTHCHECKS_MINUTES"
assert 'AGENTS_HEALTHCHECKS_SAVED' in os.environ, f"{missing_variable} AGENTS_HEALTHCHECKS_SAVED"


MAPBOX_TOKEN = os.environ.get('MAPBOX_TOKEN')
MAPBOX_FEATURE_REFRESH_MINUTES = os.environ.get('MAPBOX_FEATURE_REFRESH_MINUTES')
CYVERSE_TOKEN_REFRESH_MINUTES = os.environ.get('CYVERSE_TOKEN_REFRESH_MINUTES')
@@ -58,6 +58,7 @@
TASKS_TIMEOUT_MULTIPLIER = os.environ.get('TASKS_TIMEOUT_MULTIPLIER')
TASKS_REFRESH_SECONDS = os.environ.get('TASKS_REFRESH_SECONDS')
TASKS_CLEANUP_MINUTES = os.environ.get('TASKS_CLEANUP_MINUTES')
TASKS_STEP_TIME_LIMIT_SECONDS = os.environ.get('TASKS_STEP_TIME_LIMIT_SECONDS')
NO_PREVIEW_THUMBNAIL = os.environ.get('NO_PREVIEW_THUMBNAIL')
LAUNCHER_SCRIPT_NAME = os.environ.get('LAUNCHER_SCRIPT_NAME')
AWS_FEEDBACK_ARN = os.environ.get("AWS_FEEDBACK_ARN")
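TASKS_STEP_TIME_LIMIT_SECONDS is added to the environment checks and settings here, but the code that consumes it is not part of the shown diff. One plausible use, sketched purely as an illustration, is capping how long a single workflow step may run:

import os
import subprocess

# read the setting the same way settings.py does above
TASKS_STEP_TIME_LIMIT_SECONDS = int(os.environ.get('TASKS_STEP_TIME_LIMIT_SECONDS', '20'))


def run_step(command: str) -> int:
    """Run one step with a hard time limit; a subprocess-based runner is an assumption."""
    try:
        completed = subprocess.run(command, shell=True, timeout=TASKS_STEP_TIME_LIMIT_SECONDS)
        return completed.returncode
    except subprocess.TimeoutExpired:
        return 1  # treat a stalled step as a failure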
1 change: 1 addition & 0 deletions plantit/plantit/tasks/models.py
@@ -76,6 +76,7 @@ class Meta:
completed = models.DateTimeField(null=True, blank=True)
celery_task_id = models.CharField(max_length=50, null=True, blank=True)
transferred = models.BooleanField(default=False)
transfer_path = models.CharField(max_length=250, null=True, blank=True)
due_time = models.DateTimeField(null=True, blank=True)
cleanup_time = models.DateTimeField(null=True, blank=True)

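The new transfer_path column needs a schema migration to go with it; the migration file, if one exists, is not among the files shown here. An autogenerated migration for this field would look roughly like the following (app label, dependency, and model name are illustrative):

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('tasks', '0001_initial'),  # illustrative; the real dependency is whatever migration came before
    ]

    operations = [
        migrations.AddField(
            model_name='task',  # assumes the model shown above is named Task
            name='transfer_path',
            field=models.CharField(blank=True, max_length=250, null=True),
        ),
    ]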
9 changes: 4 additions & 5 deletions plantit/plantit/tasks/urls.py
@@ -12,14 +12,13 @@
path(r'<owner>/<name>/delete/', views.delete),
path(r'<owner>/<name>/output/', views.get_output_file),
# path(r'<owner>/<name>/file_text/', views.get_file_text),
path(r'<owner>/<name>/thumbnail/', views.get_thumbnail),
# path(r'<owner>/<name>/3d_model/', views.get_3d_model),
path(r'<owner>/<name>/orchestrator_logs/', views.get_task_logs),
path(r'<owner>/<name>/orchestrator_logs_content/', views.get_task_logs_content),
path(r'<owner>/<name>/scheduler_logs/', views.get_scheduler_logs),
path(r'<owner>/<name>/scheduler_logs_content/', views.get_scheduler_logs_content),
path(r'<owner>/<name>/agent_logs/', views.get_agent_logs),
path(r'<owner>/<name>/agent_logs_content/', views.get_agent_logs_content),
path(r'<owner>/<name>/scheduler_logs/', views.get_scheduler_logs),
path(r'<owner>/<name>/scheduler_logs_content/', views.get_scheduler_logs_content),
path(r'<owner>/<name>/orchestrator_logs/', views.get_task_logs),
path(r'<owner>/<name>/orchestrator_logs_content/', views.get_task_logs_content),
path(r'<owner>/<name>/transfer/', views.transfer_to_cyverse),
path(r'search/<owner>/<workflow_name>/<page>/', views.search),
]
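The new transfer/ route points at views.transfer_to_cyverse, which is not included in this diff. A hypothetical sketch of such a view, with the lookup fields and request payload shape treated as assumptions:

import json

from django.contrib.auth.decorators import login_required
from django.http import HttpResponseNotFound, JsonResponse

from plantit.tasks.models import Task


@login_required
def transfer_to_cyverse(request, owner, name):
    """Hypothetical handler for POST <owner>/<name>/transfer/."""
    try:
        task = Task.objects.get(user__username=owner, name=name)  # lookup fields are assumed
    except Task.DoesNotExist:
        return HttpResponseNotFound()

    body = json.loads(request.body.decode('utf-8'))
    task.transfer_path = body.get('path')  # assumed payload key for the CyVerse destination
    task.save()

    # kicking off the actual transfer (e.g. via a Celery task) is omitted here
    return JsonResponse({'transferred': task.transferred, 'transfer_path': task.transfer_path})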