Skip to content

Commit

Permalink
Merge branch 'master' into get_query_new_status-logic
Browse files Browse the repository at this point in the history
  • Loading branch information
burnout87 committed Jun 19, 2024
2 parents d0ccec2 + 7fb9989 commit 8ff27d8
Show file tree
Hide file tree
Showing 20 changed files with 1,191 additions and 71 deletions.
4 changes: 3 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -70,4 +70,6 @@ data/dummy_prods**
.venv
js9.fits
test-dispatcher-conf-with-gallery.yaml
tests/oda-ontology.ttl
tests/oda-ontology.ttl
tests/request_files
tests/local_request_files
8 changes: 8 additions & 0 deletions cdci_data_analysis/analysis/hash.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import hashlib
import subprocess
import json
from collections import OrderedDict

Expand Down Expand Up @@ -34,3 +35,10 @@ def format_hash(x): return hashlib.md5(

# this takes care of various strange objects which can not be properly represented
return format_hash(json.dumps(o))


def make_hash_file(file_path):
    """Return the MD5 hex digest of the file at ``file_path``.

    Computed with :mod:`hashlib` instead of shelling out to ``md5sum``:
    the external tool is absent on macOS/Windows and spawned one process
    per uploaded file, while the output (32-char lowercase hex digest)
    is identical.

    :param file_path: path of the file to hash
    :return: MD5 hex digest string
    :raises OSError: if the file cannot be opened/read
    """
    md5 = hashlib.md5()
    with open(file_path, 'rb') as f:
        # read in 64 KiB chunks so large uploads are not loaded fully into memory
        for chunk in iter(lambda: f.read(65536), b''):
            md5.update(chunk)
    return md5.hexdigest()
75 changes: 73 additions & 2 deletions cdci_data_analysis/analysis/instrument.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,18 +27,20 @@
import string
import json
import logging

import yaml
import validators

import numpy as np
from astropy.table import Table
from urllib.parse import urlencode

from cdci_data_analysis.analysis.queries import _check_is_base_query
from ..analysis import tokenHelper, parameters
from .catalog import BasicCatalog
from .products import QueryOutput
from .queries import ProductQuery, SourceQuery, InstrumentQuery
from .io_helper import upload_file
from .io_helper import upload_file, upload_files_request

from .exceptions import RequestNotUnderstood, RequestNotAuthorized, InternalError
from ..flask_app.sentry import sentry

Expand Down Expand Up @@ -243,6 +245,12 @@ def parse_inputs_files(self,
temp_dir,
verbose,
use_scws,
upload_dir,
products_url,
bind_host,
bind_port,
request_files_dir,
decoded_token,
sentry_dsn=None):
error_message = 'Error while {step} {temp_dir_content_msg}{additional}'
# TODO probably exception handling can be further improved and/or optmized
Expand All @@ -263,6 +271,22 @@ def parse_inputs_files(self,
temp_dir=temp_dir)
step = 'setting input scw_list file'
self.set_input_products_from_fronted(input_file_path=input_file_path, par_dic=par_dic, verbose=verbose)

# any other file
step = 'uploading other files'
uploaded_files_obj = upload_files_request(request=request,
upload_dir=upload_dir)
step = 'updating par_dic with the uploaded files'
self.update_par_dic_with_uploaded_files(par_dic=par_dic,
uploaded_files_obj=uploaded_files_obj,
products_url=products_url,
bind_host=bind_host,
bind_port=bind_port)
step = 'updating ownership files'
self.update_ownership_files(uploaded_files_obj,
request_files_dir=request_files_dir,
decoded_token=decoded_token)

except RequestNotUnderstood as e:
error_message = error_message.format(step=step,
temp_dir_content_msg='',
Expand Down Expand Up @@ -684,6 +708,52 @@ def set_input_products_from_fronted(self, input_file_path, par_dic, verbose=Fals
else:
raise RuntimeError

def update_par_dic_with_uploaded_files(self, par_dic, uploaded_files_obj, products_url, bind_host, bind_port):
    """Replace each uploaded-file entry of ``par_dic`` with a download URL.

    The URL points at the ``download_file`` endpoint, built on top of
    ``products_url`` when that is a valid URL, otherwise on the local
    dispatcher ``bind_host:bind_port``.
    """
    if validators.url(products_url, simple_host=True):
        # TODO remove the dispatch-data part, better to have it extracted from the configuration file
        basepath = os.path.join(products_url, 'dispatch-data/download_file')
    else:
        basepath = os.path.join(f"http://{bind_host}:{bind_port}", 'download_file')
    for par_name, stored_file_name in uploaded_files_obj.items():
        query_string = urlencode(dict(file_list=stored_file_name,
                                      _is_mmoda_url=True,
                                      return_archive=False))
        par_dic[par_name] = f"{basepath}?{query_string}"

def update_ownership_files(self, uploaded_files_obj, request_files_dir, decoded_token=None):
    """Record which user and roles are allowed to access each uploaded file.

    For every uploaded file, a ``<hash>_ownerships.json`` record inside
    ``request_files_dir`` stores the list of user emails and the union of
    user roles that have submitted that file. Anonymous requests
    (``decoded_token is None``) are recorded as user ``'public'`` with no
    roles.

    :param uploaded_files_obj: mapping {parameter name -> file hash}
    :param request_files_dir: directory holding the ownership records
    :param decoded_token: decoded JWT payload, or None for anonymous access
    """
    if decoded_token is not None:
        user_email = tokenHelper.get_token_user_email_address(decoded_token)
        user_roles = tokenHelper.get_token_roles(decoded_token)
    else:
        user_email = 'public'
        user_roles = []

    for file_name in uploaded_files_obj:
        # reset per file: previously the flag was set once before the loop,
        # so it leaked across iterations and unchanged ownership records of
        # later files were rewritten unnecessarily
        update_file = False
        file_hash = uploaded_files_obj[file_name]
        ownership_file_path = os.path.join(request_files_dir, f'{file_hash}_ownerships.json')
        if not os.path.exists(ownership_file_path):
            ownerships = dict(
                user_emails=[user_email],
                user_roles=user_roles
            )
            update_file = True
        else:
            with open(ownership_file_path) as ownership_file:
                ownerships = json.load(ownership_file)
            if user_email not in ownerships['user_emails']:
                ownerships['user_emails'].append(user_email)
                update_file = True
            if not all(role in ownerships['user_roles'] for role in user_roles):
                # merge the new roles into the stored set
                ownerships['user_roles'] = list(set(ownerships['user_roles']) | set(user_roles))
                update_file = True
        if update_file:
            with open(ownership_file_path, 'w') as ownership_file:
                json.dump(ownerships, ownership_file)

def set_input_products(self, par_dic, input_file_path,input_prod_list_name):
if input_file_path is None:
#if no file we pass OK condition
Expand Down Expand Up @@ -716,6 +786,7 @@ def set_input_products(self, par_dic, input_file_path,input_prod_list_name):

return has_prods


def upload_catalog_from_fronted(self, par_dic, request, temp_dir):
cat_file_path = None
if request.method == 'POST':
Expand Down
19 changes: 19 additions & 0 deletions cdci_data_analysis/analysis/io_helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,9 @@
from flask import request
from werkzeug.utils import secure_filename
import decorator

from .hash import make_hash_file

# Dependencies
# eg numpy
# absolute import eg: import numpy as np
Expand Down Expand Up @@ -206,6 +209,22 @@ def upload_file(name, dir):
return file_path


def upload_files_request(request, upload_dir):
    """Store every extra file attached to a POST request under its MD5 hash.

    Each relevant file from ``request.files`` is saved to ``upload_dir``
    and then renamed to its content hash.

    :return: mapping {form field name -> stored file name (the hash)};
        empty for non-POST requests or when nothing was uploaded.
    """
    stored_files = {}
    if request.method != 'POST':
        return stored_files
    for field_name in request.files:
        # TODO needed since those two files are extracted in a previous step
        if field_name in ('user_scw_list_file', 'user_catalog_file'):
            continue
        saved_path = upload_file(field_name, upload_dir)
        if saved_path is None:
            continue
        content_hash = make_hash_file(saved_path)
        stored_files[field_name] = content_hash
        os.rename(saved_path, os.path.join(upload_dir, content_hash))
    return stored_files


def format_size(size_bytes, format_returned='M'):
size_bytes = float(size_bytes)
size_kb = size_bytes / 1024
Expand Down
8 changes: 5 additions & 3 deletions cdci_data_analysis/analysis/job_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -163,17 +163,19 @@ def get_status(self):
return self.monitor['status']

def get_latest_monitor_mtime(self):
last_modified_time = None
job_monitor_path = os.path.join(self.work_dir, 'job_monitor.json')
if os.path.exists(job_monitor_path):
last_modified_time = os.path.getmtime(job_monitor_path)
else:
logger.warning(f"no job_monitor.json found in {self.work_dir}")
raise FileNotFoundError(f"no job_monitor.json found in {self.work_dir}")
return last_modified_time

def updated_dataserver_monitor(self,):
# TODO: combine all files

job_monitor_path = os.path.join(self.work_dir, 'job_monitor.json')
try:
with open(self.file_path, 'r') as infile:
with open(job_monitor_path, 'r') as infile:
#print("=====> reading from ", self.file_path)
self.monitor = json.load(infile,)

Expand Down
13 changes: 13 additions & 0 deletions cdci_data_analysis/analysis/parameters.py
Original file line number Diff line number Diff line change
Expand Up @@ -580,9 +580,21 @@ def __init__(self, value=None, name_format='str', name=None, allowed_values = No
def check_name_value(value, units=None, name=None, par_format=None):
pass

class LongString(String):
    """String parameter additionally tagged with the LongString ontology URI."""
    owl_uris = String.owl_uris + ("http://odahub.io/ontology#LongString",)

class Name(String):
    """String parameter tagged as an astrophysical object name in the ontology."""
    owl_uris = String.owl_uris + ("http://odahub.io/ontology#AstrophysicalObject",)

class FileReference(String):
    """Base string parameter for file-valued inputs (see POSIXPath, FileURL)."""
    owl_uris = String.owl_uris + ("http://odahub.io/ontology#FileReference",)

class POSIXPath(FileReference):
    """File reference expressed as a POSIX filesystem path."""
    owl_uris = FileReference.owl_uris + ("http://odahub.io/ontology#POSIXPath",)

class FileURL(FileReference):
    """File reference expressed as a URL to be downloaded."""
    owl_uris = FileReference.owl_uris + ("http://odahub.io/ontology#FileURL",)

class NumericParameter(Parameter):
owl_uris = ("http://odahub.io/ontology#NumericParameter")

Expand Down Expand Up @@ -1049,6 +1061,7 @@ def check_value(self):
def get_default_value(self):
return json.dumps(self.value, sort_keys=True)


class PhosphorosFiltersTable(StructuredParameter):
owl_uris = ('http://odahub.io/ontology#PhosphorosFiltersTable')

Expand Down
32 changes: 28 additions & 4 deletions cdci_data_analysis/flask_app/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,8 @@ def run_api_instr_list():
logger.warning('\nThe endpoint \'/api/instr-list\' is deprecated and you will be automatically redirected to the '
'\'/instr-list\' endpoint. Please use this one in the future.\n')

if app.config['conf'].products_url is not None and validators.url(app.config['conf'].products_url):
if app.config['conf'].products_url is not None and validators.url(app.config['conf'].products_url, simple_host=True):
# TODO remove the dispatch-data part, better to have it extracted from the configuration file
redirection_url = os.path.join(app.config['conf'].products_url, 'dispatch-data/instr-list')
if request.args:
args_request = urlencode(request.args)
Expand Down Expand Up @@ -148,10 +149,33 @@ def meta_data_src():
return query.get_meta_data('src_query')


@app.route("/download_products", methods=['POST', 'GET'])
@app.route("/download_products", methods=['POST', 'GET', 'HEAD'])
def download_products():
query = InstrumentQueryBackEnd(app, download_products=True)
return query.download_products()
from_request_files_dir = request.args.get('from_request_files_dir', 'False') == 'True'
download_file = request.args.get('download_file', 'False') == 'True'
download_products = request.args.get('download_products', 'True') == 'True'
query = InstrumentQueryBackEnd(app, download_products=download_products, download_files=download_file)
return query.download_file(from_request_files_dir=from_request_files_dir)


@app.route("/download_file", methods=['POST', 'GET', 'HEAD'])
def download_file():
if app.config['conf'].products_url is not None and validators.url(app.config['conf'].products_url, simple_host=True):
# TODO remove the dispatch-data part, better to have it extracted from the configuration file
redirection_url = os.path.join(app.config['conf'].products_url, 'dispatch-data/download_products')
if request.args:
args_request = urlencode(request.args)
redirection_url = f'{redirection_url}?{args_request}'

else:
parsed_request_url = urlparse(request.url)
path_request_url = parsed_request_url.path.replace('/download_file', '/download_products')
parsed_request_url = parsed_request_url._replace(path=path_request_url)
redirection_url = parsed_request_url.geturl()

redirection_url += f'&from_request_files_dir=True&download_file=True&download_products=False'

return redirect(redirection_url)


class UnknownDispatcherException(Exception):
Expand Down
Loading

0 comments on commit 8ff27d8

Please sign in to comment.