diff --git a/.dockstore.yml b/.dockstore.yml index f7e7800447f..d3871bcd62d 100644 --- a/.dockstore.yml +++ b/.dockstore.yml @@ -101,7 +101,7 @@ workflows: - /.*/ - name: GvsBenchmarkExtractTask subclass: WDL - primaryDescriptorPath: /scripts/variantstore/wdl/GvsBenchmarkExtractTask.wdl + primaryDescriptorPath: /scripts/variantstore/wdl/test/GvsBenchmarkExtractTask.wdl filters: branches: - master @@ -200,7 +200,7 @@ workflows: - /.*/ - name: GvsCreateVATfromVDS subclass: WDL - primaryDescriptorPath: /scripts/variantstore/wdl/GvsCreateVATfromVDS.wdl + primaryDescriptorPath: /scripts/variantstore/variant-annotations-table/GvsCreateVATfromVDS.wdl filters: branches: - master @@ -209,7 +209,7 @@ workflows: - /.*/ - name: GvsCreateVATFilesFromBigQuery subclass: WDL - primaryDescriptorPath: /scripts/variantstore/variant_annotations_table/GvsCreateVATFilesFromBigQuery.wdl + primaryDescriptorPath: /scripts/variantstore/variant-annotations-table/GvsCreateVATFilesFromBigQuery.wdl filters: branches: - master @@ -218,9 +218,9 @@ workflows: - /.*/ - name: GvsValidateVat subclass: WDL - primaryDescriptorPath: /scripts/variantstore/variant_annotations_table/GvsValidateVAT.wdl + primaryDescriptorPath: /scripts/variantstore/variant-annotations-table/GvsValidateVAT.wdl testParameterFiles: - - /scripts/variantstore/variant_annotations_table/GvsValidateVat.example.inputs.json + - /scripts/variantstore/variant-annotations-table/GvsValidateVAT.example.inputs.json filters: branches: - master @@ -285,7 +285,7 @@ workflows: - /.*/ - name: GvsQuickstartVcfIntegration subclass: WDL - primaryDescriptorPath: /scripts/variantstore/wdl/GvsQuickstartVcfIntegration.wdl + primaryDescriptorPath: /scripts/variantstore/wdl/test/GvsQuickstartVcfIntegration.wdl filters: branches: - master @@ -294,7 +294,7 @@ workflows: - /.*/ - name: GvsQuickstartHailIntegration subclass: WDL - primaryDescriptorPath: /scripts/variantstore/wdl/GvsQuickstartHailIntegration.wdl + primaryDescriptorPath: 
/scripts/variantstore/wdl/test/GvsQuickstartHailIntegration.wdl filters: branches: - master @@ -303,7 +303,7 @@ workflows: - /.*/ - name: GvsQuickstartIntegration subclass: WDL - primaryDescriptorPath: /scripts/variantstore/wdl/GvsQuickstartIntegration.wdl + primaryDescriptorPath: /scripts/variantstore/wdl/test/GvsQuickstartIntegration.wdl filters: branches: - master @@ -313,7 +313,7 @@ workflows: - /.*/ - name: GvsIngestTieout subclass: WDL - primaryDescriptorPath: /scripts/variantstore/wdl/GvsIngestTieout.wdl + primaryDescriptorPath: /scripts/variantstore/wdl/test/GvsIngestTieout.wdl filters: branches: - master @@ -358,21 +358,13 @@ workflows: - /.*/ - name: GvsTieoutVcfMaxAltAlleles subclass: WDL - primaryDescriptorPath: /scripts/variantstore/wdl/GvsTieoutVcfMaxAltAlleles.wdl + primaryDescriptorPath: /scripts/variantstore/wdl/test/GvsTieoutVcfMaxAltAlleles.wdl filters: branches: - ah_var_store - master tags: - /.*/ - - name: HailFromWdl - subclass: WDL - primaryDescriptorPath: /scripts/variantstore/wdl/HailFromWdl.wdl - filters: - branches: - - master - tags: - - /.*/ - name: MitochondriaPipeline subclass: WDL primaryDescriptorPath: /scripts/mitochondria_m2_wdl/MitochondriaPipeline.wdl @@ -448,7 +440,7 @@ workflows: - EchoCallset - name: GvsTieoutPgenToVcf subclass: WDL - primaryDescriptorPath: /scripts/variantstore/wdl/GvsTieoutPgenToVcf.wdl + primaryDescriptorPath: /scripts/variantstore/wdl/test/GvsTieoutPgenToVcf.wdl filters: branches: - ah_var_store diff --git a/scripts/variantstore/docs/aou/AOU_DELIVERABLES.md b/scripts/variantstore/docs/aou/AOU_DELIVERABLES.md index a1cb46ae9f6..ed2b601cc81 100644 --- a/scripts/variantstore/docs/aou/AOU_DELIVERABLES.md +++ b/scripts/variantstore/docs/aou/AOU_DELIVERABLES.md @@ -111,7 +111,7 @@ The Callset Stats and S&P files can be simply `gsutil cp`ed to the AoU delivery ## Running the VAT pipeline To create a BigQuery table of variant annotations, you may follow the instructions here: -[process to create variant 
annotations table](../../variant_annotations_table/README.md) +[process to create variant annotations table](../../variant-annotations-table/README.md) The pipeline takes in the VDS and outputs a variant annotations table in BigQuery. Once the VAT table is created and a tsv is exported, the AoU research workbench team should be notified of its creation and permission should be granted so that several members of the team have view permission. diff --git a/scripts/variantstore/variant_annotations_table/Dockerfile b/scripts/variantstore/variant-annotations-table/Dockerfile similarity index 100% rename from scripts/variantstore/variant_annotations_table/Dockerfile rename to scripts/variantstore/variant-annotations-table/Dockerfile diff --git a/scripts/variantstore/variant_annotations_table/GvsCreateVATFilesFromBigQuery.wdl b/scripts/variantstore/variant-annotations-table/GvsCreateVATFilesFromBigQuery.wdl similarity index 100% rename from scripts/variantstore/variant_annotations_table/GvsCreateVATFilesFromBigQuery.wdl rename to scripts/variantstore/variant-annotations-table/GvsCreateVATFilesFromBigQuery.wdl diff --git a/scripts/variantstore/wdl/GvsCreateVATfromVDS.wdl b/scripts/variantstore/variant-annotations-table/GvsCreateVATfromVDS.wdl similarity index 99% rename from scripts/variantstore/wdl/GvsCreateVATfromVDS.wdl rename to scripts/variantstore/variant-annotations-table/GvsCreateVATfromVDS.wdl index 9ddbe7094fc..805d324d861 100644 --- a/scripts/variantstore/wdl/GvsCreateVATfromVDS.wdl +++ b/scripts/variantstore/variant-annotations-table/GvsCreateVATfromVDS.wdl @@ -1,7 +1,7 @@ version 1.0 -import "GvsUtils.wdl" as Utils -import "../variant_annotations_table/GvsCreateVATFilesFromBigQuery.wdl" as GvsCreateVATFilesFromBigQuery +import "../wdl/GvsUtils.wdl" as Utils +import "GvsCreateVATFilesFromBigQuery.wdl" as GvsCreateVATFilesFromBigQuery workflow GvsCreateVATfromVDS { input { diff --git 
a/scripts/variantstore/variant_annotations_table/GvsValidateVAT.example.inputs.json b/scripts/variantstore/variant-annotations-table/GvsValidateVAT.example.inputs.json similarity index 100% rename from scripts/variantstore/variant_annotations_table/GvsValidateVAT.example.inputs.json rename to scripts/variantstore/variant-annotations-table/GvsValidateVAT.example.inputs.json diff --git a/scripts/variantstore/variant_annotations_table/GvsValidateVAT.wdl b/scripts/variantstore/variant-annotations-table/GvsValidateVAT.wdl similarity index 100% rename from scripts/variantstore/variant_annotations_table/GvsValidateVAT.wdl rename to scripts/variantstore/variant-annotations-table/GvsValidateVAT.wdl diff --git a/scripts/variantstore/variant_annotations_table/README.md b/scripts/variantstore/variant-annotations-table/README.md similarity index 93% rename from scripts/variantstore/variant_annotations_table/README.md rename to scripts/variantstore/variant-annotations-table/README.md index 111a19de57a..ec2cbff612f 100644 --- a/scripts/variantstore/variant_annotations_table/README.md +++ b/scripts/variantstore/variant-annotations-table/README.md @@ -5,8 +5,8 @@ The pipeline takes in a Hail Variant Dataset (VDS), creates a queryable table in ### VAT WDLs -- [GvsCreateVATfromVDS.wdl](/scripts/variantstore/wdl/GvsCreateVATfromVDS.wdl) creates a sites only VCF from a VDS and then uses that and an ancestry file TSV to build the variant annotations table. -- [GvsValidateVAT.wdl](/scripts/variantstore/variant_annotations_table/GvsValidateVAT.wdl) checks and validates the created VAT and prints a report of any failing validation. +- [GvsCreateVATfromVDS.wdl](/scripts/variantstore/variant-annotations-table/GvsCreateVATfromVDS.wdl) creates a sites only VCF from a VDS and then uses that and an ancestry file TSV to build the variant annotations table. 
+- [GvsValidateVAT.wdl](/scripts/variantstore/variant-annotations-table/GvsValidateVAT.wdl) checks and validates the created VAT and prints a report of any failing validation. ### Run GvsCreateVATfromVDS diff --git a/scripts/variantstore/variant_annotations_table/Reference Disk Terra Opt In.png b/scripts/variantstore/variant-annotations-table/Reference Disk Terra Opt In.png similarity index 100% rename from scripts/variantstore/variant_annotations_table/Reference Disk Terra Opt In.png rename to scripts/variantstore/variant-annotations-table/Reference Disk Terra Opt In.png diff --git a/scripts/variantstore/variant_annotations_table/build_docker.sh b/scripts/variantstore/variant-annotations-table/build_docker.sh similarity index 100% rename from scripts/variantstore/variant_annotations_table/build_docker.sh rename to scripts/variantstore/variant-annotations-table/build_docker.sh diff --git a/scripts/variantstore/variant_annotations_table/custom_annotations_template.tsv b/scripts/variantstore/variant-annotations-table/custom_annotations_template.tsv similarity index 100% rename from scripts/variantstore/variant_annotations_table/custom_annotations_template.tsv rename to scripts/variantstore/variant-annotations-table/custom_annotations_template.tsv diff --git a/scripts/variantstore/wdl/HailFromWdl.wdl b/scripts/variantstore/wdl/HailFromWdl.wdl deleted file mode 100644 index 4438525ca2d..00000000000 --- a/scripts/variantstore/wdl/HailFromWdl.wdl +++ /dev/null @@ -1,249 +0,0 @@ -version 1.0 -# Largely "borrowing" from Lee's work -# https://github.com/broadinstitute/aou-ancestry/blob/a57bbab3ccee4d06317fecb8ca109424bca373b7/script/wdl/hail_in_wdl/filter_VDS_and_shard_by_contig.wdl - -# -# Given a VDS and a bed file, render a VCF (sharded by chromosome). -# All bed files referenced in this WDL are UCSC bed files (as opposed to PLINK bed files). -# -# This has not been tested on any reference other than hg38. 
-# Inputs: -# -# ## ANALYSIS PARAMETERS -# # ie, parameters that go to the Hail python code (submission_script below) -# String vds_url -# -# # Genomic region for the output VCFs to cover -# String bed_url -# -# # VCF Header that will be used in the output -# String vcf_header_url -# -# # Contigs of interest. If a contig is present in the bed file, but not in this list, the contig will be ignored. -# # In other words, this is a contig level intersection with the bed file. -# # This list of contigs that must be present in the reference. Each contig will be processed separately (shard) -# # This list should be ordered. Eg, ["chr21", "chr22"] -# Array[String] contigs -# -# # String used in construction of output filename -# # Cannot contain any special characters, ie, characters must be alphanumeric or "-" -# String prefix -# -# ## CLUSTER PARAMETERS -# # Number of workers (per shard) to use in the Hail cluster. -# Int num_workers -# -# # Set to 'subnetwork' if running in Terra Cromwell -# String gcs_subnetwork_name='subnetwork' -# -# # The script that is run on the cluster -# # See filter_VDS_and_shard_by_contig.py for an example. -# File submission_script -# -# # Set to "us-central1" if running in Terra Cromwell -# String region = "us-central1" -# -# ## VM PARAMETERS -# # Please note that there is a RuntimeAttr struct and a task parameter that can be used to override the defaults -# # of the VM. These are task parameters. -# # However, since this can be a lightweight VM, overriding is unlikely to be necessary. -# -# # The docker to be used on the VM. This will need both Hail and Google Cloud SDK installed. -# String hail_docker="us.gcr.io/broad-dsde-methods/lichtens/hail_dataproc_wdl:1.0" -# -# Important notes: -# - Hail will save the VCFs in the cloud. You will need to provide this storage space. 
In other words, the runtime -# parameters must have enough storage space to support a single contig -# - This WDL script is still dependent on the python/Hail script that it calls. You will see this when the parameters -# are passed into the script. -# - This WDL is boilerplate, except for input parameters, output parameters, and where marked in the main task. -# - We HIGHLY recommend that the WDL is NOT run on a preemptible VM -# (reminder, this is a single VM that spins up the dataproc cluster and submits jobs -- it is not doing any of the -# actual computation. In other words, it does not need to be a heavy machine.) -# In other words, always set `preemptible_tries` to zero (default). -# - -import "GvsUtils.wdl" as Utils - -struct RuntimeAttr { - Float? mem_gb - Int? cpu_cores - Int? disk_gb - Int? boot_disk_gb - Int? preemptible_tries - Int? max_retries -} - -workflow filter_vds_to_VCF_by_chr { - ### Change here: You will need to specify all parameters (both analysis and runtime) that need to go to the - # cluster, VM spinning up the cluster, and the script being run on the cluster. - input { - - ## ANALYSIS PARAMETERS - # ie, parameters that go to the Hail python code (submission_script below) - String vds_url - - String? git_branch_or_tag - String? hail_version - String? worker_machine_type - - # Genomic region for the output VCFs to cover - String bed_url = "gs://broad-public-datasets/gvs/weights/gvs_vet_weights_1kb.bed" - - # VCF Header that will be used in the output - String vcf_header_url = "gs://gvs_quickstart_storage/hail_from_wdl/vcf_header.txt" - - # Contigs of interest. If a contig is present in the bed file, but not in this list, the contig will be ignored. - # In other words, this is a contig level intersection with the bed file. - # This list of contigs that must be present in the reference. Each contig will be processed separately (shard) - # This list should be ordered. 
Eg, ["chr21", "chr22"] - Array[String] contigs = ["chr20"] - - # String used in construction of output filename - # Cannot contain any special characters, ie, characters must be alphanumeric or "-" - String prefix = "hail-from-wdl" - - ## CLUSTER PARAMETERS - # Number of workers (per shard) to use in the Hail cluster. - Int num_workers = 10 - - # Set to 'subnetwork' if running in Terra Cromwell - String gcs_subnetwork_name = 'subnetwork' - - # The script that is run on the cluster - # See filter_VDS_and_shard_by_contig.py for an example. - File? submission_script - - # Set to "us-central1" if running in Terra Cromwell - String region = "us-central1" - } - - call Utils.GetToolVersions - - scatter (contig in contigs) { - call filter_vds_and_export_as_vcf { - input: - vds_url = vds_url, - bed_url = bed_url, - contig = contig, - prefix = prefix, - gcs_project = GetToolVersions.google_project, - num_workers = num_workers, - gcs_subnetwork_name = gcs_subnetwork_name, - vcf_header_url = vcf_header_url, - git_branch_or_tag = git_branch_or_tag, - hail_version = hail_version, - worker_machine_type = worker_machine_type, - submission_script = submission_script, - cloud_sdk_slim_docker = GetToolVersions.cloud_sdk_slim_docker, - region = region, - } - } - - output { - Array[File] vcfs = filter_vds_and_export_as_vcf.vcf - } -} - -task filter_vds_and_export_as_vcf { - input { - # You must treat a VDS as a String, since it is a directory and not a single file - String vds_url - String bed_url - String vcf_header_url - - String? git_branch_or_tag - File? submission_script - String? hail_version - String? worker_machine_type - - # contig must be in the reference - String contig - String prefix - String gcs_project - String region = "us-central1" - Int num_workers - RuntimeAttr? 
runtime_attr_override - String gcs_subnetwork_name - - String cloud_sdk_slim_docker - } - - RuntimeAttr runtime_default = object { - mem_gb: 30, - disk_gb: 100, - cpu_cores: 1, - preemptible_tries: 0, - max_retries: 0, - boot_disk_gb: 10 - } - RuntimeAttr runtime_override = select_first([runtime_attr_override, runtime_default]) - - String default_script_filename = "filter_VDS_and_shard_by_contig.py" - - command <<< - # Prepend date, time and pwd to xtrace log entries. - PS4='\D{+%F %T} \w $ ' - set -o errexit -o nounset -o pipefail -o xtrace - - account_name=$(gcloud config list account --format "value(core.account)") - - pip3 install --upgrade pip - pip3 install hail~{'==' + hail_version} - pip3 install --upgrade google-cloud-dataproc ijson - - if [[ -z "~{git_branch_or_tag}" && -z "~{submission_script}" ]] || [[ ! -z "~{git_branch_or_tag}" && ! -z "~{submission_script}" ]] - then - echo "Must specify git_branch_or_tag XOR submission_script" - exit 1 - elif [[ ! -z "~{git_branch_or_tag}" ]] - then - script_url="https://raw.githubusercontent.com/broadinstitute/gatk/~{git_branch_or_tag}/scripts/variantstore/wdl/extract/~{default_script_filename}" - curl --silent --location --remote-name "${script_url}" - fi - - if [[ ! 
-z "~{submission_script}" ]] - then - script_path="~{submission_script}" - else - script_path="~{default_script_filename}" - fi - - # Generate a UUIDish random hex string of <8 hex chars (4 bytes)>-<4 hex chars (2 bytes)> - hex="$(head -c4 < /dev/urandom | xxd -p)-$(head -c2 < /dev/urandom | xxd -p)" - - cluster_name="~{prefix}-~{contig}-hail-${hex}" - echo ${cluster_name} > cluster_name.txt - - python3 /app/run_in_hail_cluster.py \ - --script-path ${script_path} \ - --account ${account_name} \ - --num-workers ~{num_workers} \ - ~{'--worker-machine-type' + worker_machine_type} \ - --region ~{region} \ - --gcs-project ~{gcs_project} \ - --cluster-name ${cluster_name} \ - --prefix ~{prefix} \ - --contig ~{contig} \ - --vds-url ~{vds_url} \ - --vcf-header-url ~{vcf_header_url} \ - --bed-url ~{bed_url} - - echo "Complete" - >>> - - output { - String cluster_name = read_string("cluster_name.txt") - File vcf = "~{prefix}.~{contig}.vcf.bgz" - } - - runtime { - memory: select_first([runtime_override.mem_gb, runtime_default.mem_gb]) + " GB" - disks: "local-disk " + select_first([runtime_override.disk_gb, runtime_default.disk_gb]) + " SSD" - cpu: select_first([runtime_override.cpu_cores, runtime_default.cpu_cores]) - preemptible: select_first([runtime_override.preemptible_tries, runtime_default.preemptible_tries]) - maxRetries: select_first([runtime_override.max_retries, runtime_default.max_retries]) - docker: cloud_sdk_slim_docker - bootDiskSizeGb: select_first([runtime_override.boot_disk_gb, runtime_default.boot_disk_gb]) - } -} diff --git a/scripts/variantstore/wdl/ImportArrayManifest.wdl b/scripts/variantstore/wdl/old/ImportArrayManifest.wdl similarity index 99% rename from scripts/variantstore/wdl/ImportArrayManifest.wdl rename to scripts/variantstore/wdl/old/ImportArrayManifest.wdl index 6520571618d..7d9bf2d7c0c 100644 --- a/scripts/variantstore/wdl/ImportArrayManifest.wdl +++ b/scripts/variantstore/wdl/old/ImportArrayManifest.wdl @@ -1,6 +1,6 @@ version 1.0 -import 
"GvsUtils.wdl" as Utils +import "../GvsUtils.wdl" as Utils workflow ImportArrayManifest { diff --git a/scripts/variantstore/wdl/ImportArrays.wdl b/scripts/variantstore/wdl/old/ImportArrays.wdl similarity index 99% rename from scripts/variantstore/wdl/ImportArrays.wdl rename to scripts/variantstore/wdl/old/ImportArrays.wdl index 808473b89e7..b13098ed760 100644 --- a/scripts/variantstore/wdl/ImportArrays.wdl +++ b/scripts/variantstore/wdl/old/ImportArrays.wdl @@ -1,6 +1,6 @@ version 1.0 -import "GvsUtils.wdl" as Utils +import "../GvsUtils.wdl" as Utils workflow ImportArrays { diff --git a/scripts/variantstore/wdl/GvsBenchmarkExtractTask.wdl b/scripts/variantstore/wdl/test/GvsBenchmarkExtractTask.wdl similarity index 99% rename from scripts/variantstore/wdl/GvsBenchmarkExtractTask.wdl rename to scripts/variantstore/wdl/test/GvsBenchmarkExtractTask.wdl index 39c3f85630c..1a956ea5f03 100644 --- a/scripts/variantstore/wdl/GvsBenchmarkExtractTask.wdl +++ b/scripts/variantstore/wdl/test/GvsBenchmarkExtractTask.wdl @@ -1,6 +1,6 @@ version 1.0 -import "GvsUtils.wdl" as Utils +import "../GvsUtils.wdl" as Utils workflow GvsBenchmarkExtractTask { input { diff --git a/scripts/variantstore/wdl/GvsIngestTieout.wdl b/scripts/variantstore/wdl/test/GvsIngestTieout.wdl similarity index 97% rename from scripts/variantstore/wdl/GvsIngestTieout.wdl rename to scripts/variantstore/wdl/test/GvsIngestTieout.wdl index 68d8d8c6c26..bd4d74dc055 100644 --- a/scripts/variantstore/wdl/GvsIngestTieout.wdl +++ b/scripts/variantstore/wdl/test/GvsIngestTieout.wdl @@ -1,8 +1,8 @@ version 1.0 -import "GvsAssignIds.wdl" as GvsAssignIds -import "GvsImportGenomes.wdl" as GvsImportGenomes -import "GvsUtils.wdl" as Utils +import "../GvsAssignIds.wdl" as GvsAssignIds +import "../GvsImportGenomes.wdl" as GvsImportGenomes +import "../GvsUtils.wdl" as Utils workflow GvsIngestTieout { input { diff --git a/scripts/variantstore/wdl/GvsQuickstartHailIntegration.wdl 
b/scripts/variantstore/wdl/test/GvsQuickstartHailIntegration.wdl similarity index 98% rename from scripts/variantstore/wdl/GvsQuickstartHailIntegration.wdl rename to scripts/variantstore/wdl/test/GvsQuickstartHailIntegration.wdl index 85a006e55dd..a5758b72625 100644 --- a/scripts/variantstore/wdl/GvsQuickstartHailIntegration.wdl +++ b/scripts/variantstore/wdl/test/GvsQuickstartHailIntegration.wdl @@ -1,8 +1,8 @@ version 1.0 -import "GvsUtils.wdl" as Utils -import "GvsExtractAvroFilesForHail.wdl" as ExtractAvroFilesForHail -import "GvsCreateVDS.wdl" as CreateVds +import "../GvsUtils.wdl" as Utils +import "../GvsExtractAvroFilesForHail.wdl" as ExtractAvroFilesForHail +import "../GvsCreateVDS.wdl" as CreateVds import "GvsQuickstartVcfIntegration.wdl" as QuickstartVcfIntegration workflow GvsQuickstartHailIntegration { diff --git a/scripts/variantstore/wdl/GvsQuickstartIntegration.wdl b/scripts/variantstore/wdl/test/GvsQuickstartIntegration.wdl similarity index 99% rename from scripts/variantstore/wdl/GvsQuickstartIntegration.wdl rename to scripts/variantstore/wdl/test/GvsQuickstartIntegration.wdl index 9515fbefe55..f40e33223e2 100644 --- a/scripts/variantstore/wdl/GvsQuickstartIntegration.wdl +++ b/scripts/variantstore/wdl/test/GvsQuickstartIntegration.wdl @@ -2,8 +2,10 @@ version 1.0 import "GvsQuickstartVcfIntegration.wdl" as QuickstartVcfIntegration import "GvsQuickstartHailIntegration.wdl" as QuickstartHailIntegration -import "GvsJointVariantCalling.wdl" as JointVariantCalling -import "GvsUtils.wdl" as Utils +import "../GvsJointVariantCalling.wdl" as JointVariantCalling +import "../GvsUtils.wdl" as Utils + +# comment workflow GvsQuickstartIntegration { input { diff --git a/scripts/variantstore/wdl/GvsQuickstartVcfIntegration.wdl b/scripts/variantstore/wdl/test/GvsQuickstartVcfIntegration.wdl similarity index 99% rename from scripts/variantstore/wdl/GvsQuickstartVcfIntegration.wdl rename to scripts/variantstore/wdl/test/GvsQuickstartVcfIntegration.wdl index 
375229db061..96932264a36 100644 --- a/scripts/variantstore/wdl/GvsQuickstartVcfIntegration.wdl +++ b/scripts/variantstore/wdl/test/GvsQuickstartVcfIntegration.wdl @@ -1,7 +1,7 @@ version 1.0 -import "GvsUtils.wdl" as Utils -import "GvsJointVariantCalling.wdl" as JointVariantCalling +import "../GvsUtils.wdl" as Utils +import "../GvsJointVariantCalling.wdl" as JointVariantCalling workflow GvsQuickstartVcfIntegration { input { diff --git a/scripts/variantstore/wdl/GvsTieoutPgenToVcf.wdl b/scripts/variantstore/wdl/test/GvsTieoutPgenToVcf.wdl similarity index 100% rename from scripts/variantstore/wdl/GvsTieoutPgenToVcf.wdl rename to scripts/variantstore/wdl/test/GvsTieoutPgenToVcf.wdl diff --git a/scripts/variantstore/wdl/GvsTieoutVcfMaxAltAlleles.wdl b/scripts/variantstore/wdl/test/GvsTieoutVcfMaxAltAlleles.wdl similarity index 100% rename from scripts/variantstore/wdl/GvsTieoutVcfMaxAltAlleles.wdl rename to scripts/variantstore/wdl/test/GvsTieoutVcfMaxAltAlleles.wdl