fix in get_columns_for_import.py (column -> col)
mcovarr committed Jul 12, 2023
1 parent 2a0bf3b commit d628a8e
Showing 11 changed files with 23 additions and 23 deletions.
8 changes: 4 additions & 4 deletions scripts/variantstore/wdl/GvsBulkIngestGenomes.wdl
@@ -165,7 +165,7 @@ task GetWorkspaceName {

>>>
runtime {
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-bf93ad833"
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-0b2555b44"
memory: "3 GB"
disks: "local-disk 10 HDD"
cpu: 1
@@ -213,7 +213,7 @@ task GetColumnNames {
>>>

runtime {
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-bf93ad833"
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-0b2555b44"
memory: "3 GB"
disks: "local-disk 10 HDD"
cpu: 1
@@ -242,7 +242,7 @@ task SplitBulkImportFofn {
>>>

runtime {
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-bf93ad833"
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-0b2555b44"
memory: "3 GB"
disks: "local-disk 200 HDD"
cpu: 1
@@ -293,7 +293,7 @@ task GenerateImportFofnFromDataTable {
--error-file-name ~{error_file_name}
>>>
runtime {
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-bf93ad833"
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-0b2555b44"
memory: "3 GB"
disks: "local-disk 200 HDD"
cpu: 1
@@ -261,7 +261,7 @@ task Add_AS_MAX_VQS_SCORE_ToVcf {
File input_vcf
String output_basename

String docker = "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-bf93ad833"
String docker = "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-0b2555b44"
Int cpu = 1
Int memory_mb = 3500
Int disk_size_gb = ceil(2*size(input_vcf, "GiB")) + 50
2 changes: 1 addition & 1 deletion scripts/variantstore/wdl/GvsCallsetCost.wdl
@@ -62,7 +62,7 @@ task WorkflowComputeCosts {
>>>

runtime {
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-bf93ad833"
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-0b2555b44"
}

output {
10 changes: 5 additions & 5 deletions scripts/variantstore/wdl/GvsCreateVATfromVDS.wdl
@@ -167,7 +167,7 @@ task MakeSubpopulationFilesAndReadSchemaFiles {
# ------------------------------------------------
# Runtime settings:
runtime {
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-bf93ad833"
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-0b2555b44"
memory: "1 GB"
preemptible: 3
cpu: "1"
@@ -212,7 +212,7 @@ task StripCustomAnnotationsFromSitesOnlyVCF {
# ------------------------------------------------
# Runtime settings:
runtime {
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-bf93ad833"
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-0b2555b44"
memory: "7 GiB"
cpu: "2"
preemptible: 3
@@ -297,7 +297,7 @@ task RemoveDuplicatesFromSitesOnlyVCF {
# ------------------------------------------------
# Runtime settings:
runtime {
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-bf93ad833"
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-0b2555b44"
maxRetries: 3
memory: "16 GB"
preemptible: 3
@@ -457,7 +457,7 @@ task PrepVtAnnotationJson {
# ------------------------------------------------
# Runtime settings:
runtime {
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-bf93ad833"
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-0b2555b44"
memory: "7 GB"
preemptible: 3
cpu: "1"
@@ -503,7 +503,7 @@ task PrepGenesAnnotationJson {
# ------------------------------------------------
# Runtime settings:
runtime {
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-bf93ad833"
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-0b2555b44"
memory: "7 GB"
preemptible: 3
cpu: "1"
6 changes: 3 additions & 3 deletions scripts/variantstore/wdl/GvsExtractAvroFilesForHail.wdl
@@ -177,7 +177,7 @@ task ExtractFromNonSuperpartitionedTables {
}

runtime {
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-bf93ad833"
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-0b2555b44"
disks: "local-disk 500 HDD"
}
}
@@ -244,7 +244,7 @@ task ExtractFromSuperpartitionedTables {
}

runtime {
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-bf93ad833"
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-0b2555b44"
disks: "local-disk 500 HDD"
}
}
@@ -312,7 +312,7 @@ task GenerateHailScripts {
File hail_create_vat_inputs_script = 'hail_create_vat_inputs.py'
}
runtime {
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-bf93ad833"
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-0b2555b44"
disks: "local-disk 500 HDD"
}
}
4 changes: 2 additions & 2 deletions scripts/variantstore/wdl/GvsImportGenomes.wdl
@@ -339,7 +339,7 @@ task ProcessVCFHeaders {
>>>

runtime {
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-bf93ad833"
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-0b2555b44"
disks: "local-disk 500 HDD"
}
}
@@ -510,7 +510,7 @@ task CurateInputLists {
--vcf_index_list_file_name ~{input_vcf_index_list}
>>>
runtime {
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-bf93ad833"
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-0b2555b44"
memory: "3 GB"
disks: "local-disk 100 HDD"
bootDiskSizeGb: 15
2 changes: 1 addition & 1 deletion scripts/variantstore/wdl/GvsPopulateAltAllele.wdl
@@ -243,7 +243,7 @@ task PopulateAltAlleleTable {
done
>>>
runtime {
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-bf93ad833"
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-0b2555b44"
memory: "3 GB"
disks: "local-disk 10 HDD"
cpu: 1
2 changes: 1 addition & 1 deletion scripts/variantstore/wdl/GvsPrepareRangesCallset.wdl
@@ -114,7 +114,7 @@ task PrepareRangesCallsetTask {
}

runtime {
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-bf93ad833"
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-0b2555b44"
memory: "3 GB"
disks: "local-disk 100 HDD"
bootDiskSizeGb: 15
2 changes: 1 addition & 1 deletion scripts/variantstore/wdl/GvsQuickstartIntegration.wdl
@@ -19,7 +19,7 @@ task FilterIntervalListChromosomes {
--output-interval-list "filtered.interval_list" --chromosome ~{sep=' --chromosome ' chromosomes}
>>>
runtime {
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-bf93ad833"
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-0b2555b44"
}
output {
File out = "filtered.interval_list"
4 changes: 2 additions & 2 deletions scripts/variantstore/wdl/GvsUtils.wdl
@@ -509,7 +509,7 @@ task ScaleXYBedValues {
}
runtime {
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-bf93ad833"
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-0b2555b44"
maxRetries: 3
memory: "7 GB"
preemptible: 3
@@ -854,7 +854,7 @@ task SummarizeTaskMonitorLogs {
# ------------------------------------------------
# Runtime settings:
runtime {
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-bf93ad833"
docker: "us.gcr.io/broad-dsde-methods/variantstore:2023-07-12-alpine-0b2555b44"
memory: "1 GB"
preemptible: 3
cpu: "1"
4 changes: 2 additions & 2 deletions scripts/variantstore/wdl/extract/get_columns_for_import.py
@@ -243,13 +243,13 @@ def get_column_values(columnSamples, numSamples, user_defined_vcf, user_defined_
# Check the contents of the columns: the duck algorithm. If its contents LOOK like vcfs and indexes, go from there
for col in path_ends_in_vcf_gz:
# ...and has an analogue that looks like an index file?
index_column = f"{column}_index"
index_column = f"{col}_index"
# is this the correct logic? Or do we just want to look for ANY singular column that has contents that
# look liks a vcf and ANY other singular column with contents that look like an index file? Stick with
# enforcing a naming convention for now...
if index_column in path_ends_in_vcf_gz_tbi:
# woohoo!
-final_vcf_column = column
+final_vcf_column = col
final_vcf_index_column = index_column
found_vcf_column = True

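The change above fixes a loop-variable bug: the loop iterates as "for col in path_ends_in_vcf_gz", but the body still referenced the old name "column" when building the index-column name and recording the matching VCF column. A minimal sketch of the corrected pairing logic follows; it is not the actual get_columns_for_import.py code, and the helper name and example column names are made up.

# Hypothetical helper, for illustration only (not the real script).
# Given data-table columns whose contents look like *.vcf.gz paths and columns
# whose contents look like *.vcf.gz.tbi paths, pick a VCF column together with
# its matching "<name>_index" column, per the naming convention enforced above.
def pick_vcf_columns(path_ends_in_vcf_gz, path_ends_in_vcf_gz_tbi):
    for col in path_ends_in_vcf_gz:
        index_column = f"{col}_index"      # the fix: previously f"{column}_index"
        if index_column in path_ends_in_vcf_gz_tbi:
            return col, index_column       # the fix: previously recorded "column"
    return None, None

# Example with made-up column names:
# pick_vcf_columns(["reblocked_gvcf"], ["reblocked_gvcf_index"])
# returns ("reblocked_gvcf", "reblocked_gvcf_index")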
