Commit fca93af0 authored by Yakubov, Sergey's avatar Yakubov, Sergey
Browse files

Merge branch '76-proper-fail-job-on-object-store-error' into 'dev'

Resolve "Proper fail job on object store error"

Closes #76

See merge request !61
parents dd6bfeae 6518668b
Loading
Loading
Loading
Loading
Loading
+2 −2
Original line number Diff line number Diff line
@@ -9,8 +9,8 @@ variables:
  CONTAINER_GALAXY_URL: "${CI_REGISTRY_IMAGE}"
  CONTAINER_GALAXY_BASE_URL: "${CONTAINER_GALAXY_URL}/base"
  CONTAINER_GALAXY_COMMIT_URL: "${CONTAINER_GALAXY_URL}/commit"
-  GALAXY_VERSION_PYTHON: 23.1.dev0+ornl
-  GALAXY_VERSION_DOCKER: 23.1.dev0.ornl
+  GALAXY_VERSION_PYTHON: 23.1.dev1+ornl
+  GALAXY_VERSION_DOCKER: 23.1.dev1.ornl

# This import is for the func_rse_docker_* functions
before_script:
+2 −0
Original line number Diff line number Diff line
@@ -1971,6 +1971,8 @@ class MinimalJobWrapper(HasResourceParameters):
                log.debug("(%s) setting dataset %s state to ERROR", job.id, dataset_assoc.dataset.dataset.id)
+                # TODO: This is where the state is being set to error. Change it!
                 dataset_assoc.dataset.dataset.state = model.Dataset.states.ERROR
+                dataset_assoc.dataset.update_time = datetime.datetime.now()

                # Pause any dependent jobs (and those jobs' outputs)
                for dep_job_assoc in dataset_assoc.dataset.dependent_jobs:
                    self.pause(
+1 −0
Original line number Diff line number Diff line
@@ -365,6 +365,7 @@ class RucioBroker:
                        "force_scheme": rse_scheme["scheme"],
                        "rse": rse_scheme["rse"],
                        "base_dir": base_dir,
+                        "check_local_with_filesize_only": string_as_bool(rse_scheme["ignore_checksum"]),
                        "ignore_checksum": string_as_bool(rse_scheme["ignore_checksum"]),
                        "no_subdir": True,
                    }
+1 −1
Original line number Diff line number Diff line
@@ -49,7 +49,7 @@ class RegisterToolAction(BaseUploadToolAction):
        outputs = []
        for item in incoming.get("series", []):
            name = item.get("input", None)
-            file_type = "_sniff_"
+            file_type = os.path.splitext(name)[1][1:]
            dbkey = "?"
            uploaded_dataset = Bunch(type="file", name=name, file_type=file_type, dbkey=dbkey)
            tag_list = []