diff --git a/lib/alpn-boot-7.1.3.v20150130.jar b/lib/alpn-boot-7.1.3.v20150130.jar new file mode 100644 index 0000000..b754518 Binary files /dev/null and b/lib/alpn-boot-7.1.3.v20150130.jar differ diff --git a/lib/alpn-boot-8.1.3.v20150130.jar b/lib/alpn-boot-8.1.3.v20150130.jar new file mode 100644 index 0000000..624b03b Binary files /dev/null and b/lib/alpn-boot-8.1.3.v20150130.jar differ diff --git a/pom.xml b/pom.xml index 9cef938..f570ccf 100644 --- a/pom.xml +++ b/pom.xml @@ -127,7 +127,7 @@ com.google.cloud.genomics google-genomics-utils - v1beta2-0.37 + v1beta2-0.38 @@ -233,6 +233,24 @@ + + java7profile + + [1.7, 1.8) + + + ${basedir}/lib/alpn-boot-7.1.3.v20150130.jar + + + + java8profile + + [1.8, 1.9) + + + ${basedir}/lib/alpn-boot-8.1.3.v20150130.jar + + @@ -304,6 +322,7 @@ 2.18.1 false + -Xbootclasspath/p:${alpn.jar} diff --git a/src/main/java/com/google/cloud/genomics/dataflow/functions/AlleleSimilarityCalculator.java b/src/main/java/com/google/cloud/genomics/dataflow/functions/AlleleSimilarityCalculator.java index 79edaf6..1283c5f 100644 --- a/src/main/java/com/google/cloud/genomics/dataflow/functions/AlleleSimilarityCalculator.java +++ b/src/main/java/com/google/cloud/genomics/dataflow/functions/AlleleSimilarityCalculator.java @@ -16,15 +16,15 @@ import java.util.HashMap; import java.util.Map; -import com.google.api.services.genomics.model.Call; -import com.google.api.services.genomics.model.Variant; import com.google.cloud.dataflow.sdk.transforms.DoFn; import com.google.cloud.dataflow.sdk.values.KV; import com.google.cloud.genomics.dataflow.utils.CallFilters; import com.google.cloud.genomics.dataflow.utils.PairGenerator; -import com.google.cloud.genomics.utils.VariantUtils; +import com.google.cloud.genomics.utils.grpc.VariantUtils; import com.google.common.collect.ImmutableList; import com.google.common.collect.Maps; +import com.google.genomics.v1.Variant; +import com.google.genomics.v1.VariantCall; /** * For each pair of calls on any of the given 
variants, computes a score, a number between 0 and 1 @@ -55,14 +55,14 @@ public void processElement(ProcessContext context) { Variant variant = context.element(); CallSimilarityCalculator callSimilarityCalculator = callSimilarityCalculatorFactory.get(isReferenceMajor(variant)); - for (KV pair : PairGenerator.WITHOUT_REPLACEMENT.allPairs( + for (KV pair : PairGenerator.WITHOUT_REPLACEMENT.allPairs( getSamplesWithVariant(variant), VariantUtils.CALL_COMPARATOR)) { accumulateCallSimilarity(callSimilarityCalculator, pair.getKey(), pair.getValue()); } } private void accumulateCallSimilarity(CallSimilarityCalculator callSimilarityCalculator, - Call call1, Call call2) { + VariantCall call1, VariantCall call2) { KV callPair = KV.of(call1.getCallSetName(), call2.getCallSetName()); KV callPairAccumulation = accumulator.get(callPair); if (callPairAccumulation == null) { @@ -79,15 +79,15 @@ public void finishBundle(Context context) { output(context, accumulator); } - static ImmutableList getSamplesWithVariant(Variant variant) { + static ImmutableList getSamplesWithVariant(Variant variant) { return CallFilters.getSamplesWithVariantOfMinGenotype(variant, 0); } static boolean isReferenceMajor(Variant variant) { int referenceAlleles = 0; int alternateAlleles = 0; - for (Call call : variant.getCalls()) { - for (Integer i : call.getGenotype()) { + for (VariantCall call : variant.getCallsList()) { + for (Integer i : call.getGenotypeList()) { if (i == 0) { ++referenceAlleles; } else if (i > 0) { diff --git a/src/main/java/com/google/cloud/genomics/dataflow/functions/CallSimilarityCalculator.java b/src/main/java/com/google/cloud/genomics/dataflow/functions/CallSimilarityCalculator.java index b95e721..15d87fe 100644 --- a/src/main/java/com/google/cloud/genomics/dataflow/functions/CallSimilarityCalculator.java +++ b/src/main/java/com/google/cloud/genomics/dataflow/functions/CallSimilarityCalculator.java @@ -15,10 +15,10 @@ */ package com.google.cloud.genomics.dataflow.functions; -import 
com.google.api.services.genomics.model.Call; +import com.google.genomics.v1.VariantCall; public interface CallSimilarityCalculator { - double similarity(Call call1, Call call2); + double similarity(VariantCall call1, VariantCall call2); } diff --git a/src/main/java/com/google/cloud/genomics/dataflow/functions/JoinNonVariantSegmentsWithVariants.java b/src/main/java/com/google/cloud/genomics/dataflow/functions/JoinNonVariantSegmentsWithVariants.java index 0789ac8..e68ed9a 100644 --- a/src/main/java/com/google/cloud/genomics/dataflow/functions/JoinNonVariantSegmentsWithVariants.java +++ b/src/main/java/com/google/cloud/genomics/dataflow/functions/JoinNonVariantSegmentsWithVariants.java @@ -31,7 +31,7 @@ import com.google.cloud.dataflow.sdk.values.PCollection; import com.google.cloud.genomics.dataflow.readers.VariantReader; import com.google.cloud.genomics.dataflow.utils.GenomicsDatasetOptions; -import com.google.cloud.genomics.utils.GenomicsFactory; +import com.google.cloud.genomics.utils.OfflineAuth; import com.google.cloud.genomics.utils.ShardBoundary; import com.google.cloud.genomics.utils.VariantUtils; import com.google.common.base.Function; @@ -71,7 +71,7 @@ public class JoinNonVariantSegmentsWithVariants { * merged into the variants with which they overlap. */ public static PCollection joinVariantsTransform( - PCollection input, GenomicsFactory.OfflineAuth auth) { + PCollection input, OfflineAuth auth) { return joinVariants(input, auth, null); } @@ -87,7 +87,7 @@ public static PCollection joinVariantsTransform( * merged into the variants with which they overlap. 
*/ public static PCollection joinVariantsTransform( - PCollection input, GenomicsFactory.OfflineAuth auth, String fields) { + PCollection input, OfflineAuth auth, String fields) { for (String field : REQUIRED_FIELDS) { Preconditions .checkArgument( @@ -99,7 +99,7 @@ public static PCollection joinVariantsTransform( } private static PCollection joinVariants(PCollection input, - GenomicsFactory.OfflineAuth auth, String fields) { + OfflineAuth auth, String fields) { return input .apply( ParDo.named(VariantReader.class.getSimpleName()).of( diff --git a/src/main/java/com/google/cloud/genomics/dataflow/functions/SharedAllelesRatioCalculator.java b/src/main/java/com/google/cloud/genomics/dataflow/functions/SharedAllelesRatioCalculator.java index 136bf47..f5d7a1a 100644 --- a/src/main/java/com/google/cloud/genomics/dataflow/functions/SharedAllelesRatioCalculator.java +++ b/src/main/java/com/google/cloud/genomics/dataflow/functions/SharedAllelesRatioCalculator.java @@ -15,7 +15,7 @@ */ package com.google.cloud.genomics.dataflow.functions; -import com.google.api.services.genomics.model.Call; +import com.google.genomics.v1.VariantCall; /** * See this @@ -27,15 +27,15 @@ public class SharedAllelesRatioCalculator implements CallSimilarityCalculator { // TODO: Double check that the following is the right way of computing the IBS // scores when the number of alleles is different than 2 and when the genotypes are unphased. 
@Override - public double similarity(Call call1, Call call2) { - int minNumberOfGenotypes = Math.min(call1.getGenotype().size(), call2.getGenotype().size()); + public double similarity(VariantCall call1, VariantCall call2) { + int minNumberOfGenotypes = Math.min(call1.getGenotypeCount(), call2.getGenotypeCount()); int numberOfSharedAlleles = 0; for (int i = 0; i < minNumberOfGenotypes; ++i) { - if (call1.getGenotype().get(i) == call2.getGenotype().get(i)) { + if (call1.getGenotype(i) == call2.getGenotype(i)) { ++numberOfSharedAlleles; } } - int maxNumberOfGenotypes = Math.max(call1.getGenotype().size(), call2.getGenotype().size()); + int maxNumberOfGenotypes = Math.max(call1.getGenotypeCount(), call2.getGenotypeCount()); return (double) numberOfSharedAlleles / maxNumberOfGenotypes; } diff --git a/src/main/java/com/google/cloud/genomics/dataflow/functions/SharedMinorAllelesCalculator.java b/src/main/java/com/google/cloud/genomics/dataflow/functions/SharedMinorAllelesCalculator.java index 6b385c9..c6e86cc 100644 --- a/src/main/java/com/google/cloud/genomics/dataflow/functions/SharedMinorAllelesCalculator.java +++ b/src/main/java/com/google/cloud/genomics/dataflow/functions/SharedMinorAllelesCalculator.java @@ -15,9 +15,9 @@ */ package com.google.cloud.genomics.dataflow.functions; -import com.google.api.services.genomics.model.Call; import com.google.common.base.Predicate; import com.google.common.collect.Iterables; +import com.google.genomics.v1.VariantCall; /** * Computes the similarity of two calls based on whether they share a minor allele or not. 
@@ -30,8 +30,8 @@ public SharedMinorAllelesCalculator(boolean isReferenceMajor) { this.isReferenceMajor = isReferenceMajor; } - private boolean hasMinorAllele(Call call) { - return Iterables.any(call.getGenotype(), new Predicate() { + private boolean hasMinorAllele(VariantCall call) { + return Iterables.any(call.getGenotypeList(), new Predicate() { @Override public boolean apply(Integer genotype) { @@ -46,7 +46,7 @@ public boolean apply(Integer genotype) { } @Override - public double similarity(Call call1, Call call2) { + public double similarity(VariantCall call1, VariantCall call2) { if (call1.getCallSetName().equals(call2.getCallSetName())) { return 1.0; } diff --git a/src/main/java/com/google/cloud/genomics/dataflow/pipelines/AnnotateVariants.java b/src/main/java/com/google/cloud/genomics/dataflow/pipelines/AnnotateVariants.java index e09c5b6..d953ba4 100644 --- a/src/main/java/com/google/cloud/genomics/dataflow/pipelines/AnnotateVariants.java +++ b/src/main/java/com/google/cloud/genomics/dataflow/pipelines/AnnotateVariants.java @@ -48,6 +48,7 @@ import com.google.cloud.genomics.dataflow.utils.GenomicsDatasetOptions; import com.google.cloud.genomics.dataflow.utils.GenomicsOptions; import com.google.cloud.genomics.utils.GenomicsFactory; +import com.google.cloud.genomics.utils.OfflineAuth; import com.google.cloud.genomics.utils.Paginator; import com.google.cloud.genomics.utils.ShardBoundary; import com.google.cloud.genomics.utils.ShardUtils; @@ -87,11 +88,11 @@ public final class AnnotateVariants extends DoFn callSetIds, transcriptSetIds, variantAnnotationSetIds; private final Map, String> refBaseCache; - public AnnotateVariants(GenomicsFactory.OfflineAuth auth, + public AnnotateVariants(OfflineAuth auth, List callSetIds, List transcriptSetIds, List variantAnnotationSetIds) { this.auth = auth; @@ -104,7 +105,7 @@ public AnnotateVariants(GenomicsFactory.OfflineAuth auth, @Override public void processElement( DoFn>.ProcessContext c) throws Exception { - Genomics 
genomics = auth.getGenomics(auth.getDefaultFactory()); + Genomics genomics = GenomicsFactory.builder().build().fromOfflineAuth(auth); SearchVariantsRequest request = c.element(); LOG.info("processing contig " + request); @@ -258,8 +259,8 @@ public static void main(String[] args) throws Exception { // Option validation is not yet automatic, we make an explicit call here. GenomicsDatasetOptions.Methods.validateOptions(opts); - GenomicsFactory.OfflineAuth auth = GenomicsOptions.Methods.getGenomicsAuth(opts); - Genomics genomics = auth.getGenomics(auth.getDefaultFactory()); + OfflineAuth auth = GenomicsOptions.Methods.getGenomicsAuth(opts); + Genomics genomics = GenomicsFactory.builder().build().fromOfflineAuth(auth); List callSetIds = ImmutableList.of(); if (!Strings.isNullOrEmpty(opts.getCallSetIds().trim())) { diff --git a/src/main/java/com/google/cloud/genomics/dataflow/pipelines/CalculateCoverage.java b/src/main/java/com/google/cloud/genomics/dataflow/pipelines/CalculateCoverage.java index 10f0307..427d7ff 100644 --- a/src/main/java/com/google/cloud/genomics/dataflow/pipelines/CalculateCoverage.java +++ b/src/main/java/com/google/cloud/genomics/dataflow/pipelines/CalculateCoverage.java @@ -48,6 +48,7 @@ import com.google.cloud.genomics.utils.Contig; import com.google.cloud.genomics.utils.GenomicsFactory; import com.google.cloud.genomics.utils.GenomicsUtils; +import com.google.cloud.genomics.utils.OfflineAuth; import com.google.cloud.genomics.utils.RetryPolicy; import com.google.cloud.genomics.utils.ShardBoundary; import com.google.cloud.genomics.utils.ShardUtils.SexChromosomeFilter; @@ -76,7 +77,7 @@ public class CalculateCoverage { private static CoverageOptions options; private static Pipeline p; - private static GenomicsFactory.OfflineAuth auth; + private static OfflineAuth auth; /** * Options required to run this pipeline. 
@@ -192,10 +193,10 @@ public static void main(String[] args) throws GeneralSecurityException, IOExcept static class CheckMatchingReferenceSet extends DoFn { private final String referenceSetIdForAllReadGroupSets; - private final GenomicsFactory.OfflineAuth auth; + private final OfflineAuth auth; public CheckMatchingReferenceSet(String referenceSetIdForAllReadGroupSets, - GenomicsFactory.OfflineAuth auth) { + OfflineAuth auth) { this.referenceSetIdForAllReadGroupSets = referenceSetIdForAllReadGroupSets; this.auth = auth; } @@ -396,11 +397,11 @@ static class CreateAnnotations extends DoFn>>>, Annotation> { private final String asId; - private final GenomicsFactory.OfflineAuth auth; + private final OfflineAuth auth; private final List currAnnotations; private final boolean write; - public CreateAnnotations(String asId, GenomicsFactory.OfflineAuth auth, boolean write) { + public CreateAnnotations(String asId, OfflineAuth auth, boolean write) { this.asId = asId; this.auth = auth; this.currAnnotations = Lists.newArrayList(); @@ -446,8 +447,8 @@ public void finishBundle(Context c) throws IOException, GeneralSecurityException } private void batchCreateAnnotations() throws IOException, GeneralSecurityException { - Genomics.Annotations.BatchCreate aRequest = auth.getGenomics(auth.getDefaultFactory()) - .annotations().batchCreate( + Genomics genomics = GenomicsFactory.builder().build().fromOfflineAuth(auth); + Genomics.Annotations.BatchCreate aRequest = genomics.annotations().batchCreate( new BatchCreateAnnotationsRequest().setAnnotations(currAnnotations)); RetryPolicy retryP = RetryPolicy.nAttempts(4); retryP.execute(aRequest); @@ -477,8 +478,8 @@ private static AnnotationSet createAnnotationSet(String referenceSetId) } as.setReferenceSetId(referenceSetId); as.setType("GENERIC"); - Genomics.AnnotationSets.Create asRequest = auth.getGenomics(auth.getDefaultFactory()) - .annotationSets().create(as); + Genomics genomics = GenomicsFactory.builder().build().fromOfflineAuth(auth); 
+ Genomics.AnnotationSets.Create asRequest = genomics.annotationSets().create(as); AnnotationSet asWithId = asRequest.execute(); return asWithId; } diff --git a/src/main/java/com/google/cloud/genomics/dataflow/pipelines/CountReads.java b/src/main/java/com/google/cloud/genomics/dataflow/pipelines/CountReads.java index dd7b161..a5c30b1 100644 --- a/src/main/java/com/google/cloud/genomics/dataflow/pipelines/CountReads.java +++ b/src/main/java/com/google/cloud/genomics/dataflow/pipelines/CountReads.java @@ -47,7 +47,7 @@ import com.google.cloud.genomics.dataflow.utils.GenomicsDatasetOptions; import com.google.cloud.genomics.dataflow.utils.GenomicsOptions; import com.google.cloud.genomics.utils.Contig; -import com.google.cloud.genomics.utils.GenomicsFactory; +import com.google.cloud.genomics.utils.OfflineAuth; import com.google.cloud.genomics.utils.ShardBoundary; import com.google.cloud.genomics.utils.ShardUtils; import com.google.common.base.Strings; @@ -63,7 +63,7 @@ public class CountReads { private static final Logger LOG = Logger.getLogger(CountReads.class.getName()); private static CountReadsOptions options; private static Pipeline p; - private static GenomicsFactory.OfflineAuth auth; + private static OfflineAuth auth; public static interface CountReadsOptions extends GenomicsDatasetOptions, GCSOptions { @Description("The ID of the Google Genomics ReadGroupSet this pipeline is working with. 
" diff --git a/src/main/java/com/google/cloud/genomics/dataflow/pipelines/IdentityByState.java b/src/main/java/com/google/cloud/genomics/dataflow/pipelines/IdentityByState.java index e22a0be..044a89e 100644 --- a/src/main/java/com/google/cloud/genomics/dataflow/pipelines/IdentityByState.java +++ b/src/main/java/com/google/cloud/genomics/dataflow/pipelines/IdentityByState.java @@ -17,8 +17,6 @@ import java.security.GeneralSecurityException; import java.util.List; -import com.google.api.services.genomics.model.SearchVariantsRequest; -import com.google.api.services.genomics.model.Variant; import com.google.cloud.dataflow.sdk.Pipeline; import com.google.cloud.dataflow.sdk.io.TextIO; import com.google.cloud.dataflow.sdk.options.PipelineOptionsFactory; @@ -32,14 +30,17 @@ import com.google.cloud.genomics.dataflow.functions.CallSimilarityCalculatorFactory; import com.google.cloud.genomics.dataflow.functions.FormatIBSData; import com.google.cloud.genomics.dataflow.functions.IBSCalculator; -import com.google.cloud.genomics.dataflow.functions.JoinNonVariantSegmentsWithVariants; +import com.google.cloud.genomics.dataflow.functions.grpc.JoinNonVariantSegmentsWithVariants; import com.google.cloud.genomics.dataflow.readers.VariantReader; +import com.google.cloud.genomics.dataflow.readers.VariantStreamer; import com.google.cloud.genomics.dataflow.utils.GenomicsDatasetOptions; import com.google.cloud.genomics.dataflow.utils.GenomicsOptions; import com.google.cloud.genomics.dataflow.utils.IdentityByStateOptions; -import com.google.cloud.genomics.utils.GenomicsFactory; +import com.google.cloud.genomics.utils.OfflineAuth; import com.google.cloud.genomics.utils.ShardBoundary; import com.google.cloud.genomics.utils.ShardUtils; +import com.google.genomics.v1.StreamVariantsRequest; +import com.google.genomics.v1.Variant; /** * A pipeline that computes Identity by State (IBS) for each pair of individuals in a dataset. 
@@ -49,8 +50,8 @@ */ public class IdentityByState { - private static final String VARIANT_FIELDS = - "nextPageToken,variants(start,calls(genotype,callSetName))"; + // TODO: https://github.com/googlegenomics/utils-java/issues/48 + private static final String VARIANT_FIELDS = "variants(start,calls(genotype,callSetName))"; public static void main(String[] args) throws IOException, GeneralSecurityException, InstantiationException, IllegalAccessException { @@ -61,27 +62,29 @@ public static void main(String[] args) throws IOException, GeneralSecurityExcept // Option validation is not yet automatic, we make an explicit call here. GenomicsDatasetOptions.Methods.validateOptions(options); - GenomicsFactory.OfflineAuth auth = GenomicsOptions.Methods.getGenomicsAuth(options); - List requests = options.isAllReferences() ? - ShardUtils.getPaginatedVariantRequests(options.getDatasetId(), ShardUtils.SexChromosomeFilter.EXCLUDE_XY, + OfflineAuth auth = GenomicsOptions.Methods.getGenomicsAuth(options); + List requests = options.isAllReferences() ? 
+ ShardUtils.getVariantRequests(options.getDatasetId(), ShardUtils.SexChromosomeFilter.EXCLUDE_XY, options.getBasesPerShard(), auth) : - ShardUtils.getPaginatedVariantRequests(options.getDatasetId(), options.getReferences(), options.getBasesPerShard()); + ShardUtils.getVariantRequests(options.getDatasetId(), options.getReferences(), options.getBasesPerShard()); Pipeline p = Pipeline.create(options); p.getCoderRegistry().setFallbackCoderProvider(GenericJsonCoder.PROVIDER); - PCollection input = p.begin().apply(Create.of(requests)); + PCollection variants = p.begin() + .apply(Create.of(requests)) + .apply(new VariantStreamer(auth, ShardBoundary.Requirement.STRICT, VARIANT_FIELDS)); - PCollection variants = - options.getHasNonVariantSegments() + PCollection processedVariants; + if(options.getHasNonVariantSegments()) { // Special handling is needed for data with non-variant segment records since IBS must // take into account reference-matches in addition to the variants (unlike // other analyses such as PCA). - ? 
JoinNonVariantSegmentsWithVariants.joinVariantsTransform(input, auth, - JoinNonVariantSegmentsWithVariants.VARIANT_JOIN_FIELDS) : input.apply(ParDo.named( - VariantReader.class.getSimpleName()).of( - new VariantReader(auth, ShardBoundary.Requirement.STRICT, VARIANT_FIELDS))); + processedVariants = JoinNonVariantSegmentsWithVariants.joinVariantsTransform(variants); + } else { + processedVariants = variants; + } - variants + processedVariants .apply( ParDo.named(AlleleSimilarityCalculator.class.getSimpleName()).of( new AlleleSimilarityCalculator(getCallSimilarityCalculatorFactory(options)))) diff --git a/src/main/java/com/google/cloud/genomics/dataflow/pipelines/ShardedBAMWriting.java b/src/main/java/com/google/cloud/genomics/dataflow/pipelines/ShardedBAMWriting.java index cbf4735..6764515 100644 --- a/src/main/java/com/google/cloud/genomics/dataflow/pipelines/ShardedBAMWriting.java +++ b/src/main/java/com/google/cloud/genomics/dataflow/pipelines/ShardedBAMWriting.java @@ -37,7 +37,7 @@ import com.google.cloud.genomics.dataflow.utils.ShardReadsTransform; import com.google.cloud.genomics.dataflow.writers.WriteReadsTransform; import com.google.cloud.genomics.utils.Contig; -import com.google.cloud.genomics.utils.GenomicsFactory; +import com.google.cloud.genomics.utils.OfflineAuth; import com.google.common.collect.Lists; import htsjdk.samtools.SAMFileHeader; @@ -72,7 +72,7 @@ static interface Options extends ShardReadsTransform.Options, WriteReadsTransfor private static final int MAX_FILES_FOR_COMPOSE = 32; private static Options options; private static Pipeline pipeline; - private static GenomicsFactory.OfflineAuth auth; + private static OfflineAuth auth; private static Iterable contigs; public static void main(String[] args) throws GeneralSecurityException, IOException { diff --git a/src/main/java/com/google/cloud/genomics/dataflow/pipelines/TransmissionProbability.java b/src/main/java/com/google/cloud/genomics/dataflow/pipelines/TransmissionProbability.java index 
9326794..a3d065c 100644 --- a/src/main/java/com/google/cloud/genomics/dataflow/pipelines/TransmissionProbability.java +++ b/src/main/java/com/google/cloud/genomics/dataflow/pipelines/TransmissionProbability.java @@ -35,7 +35,7 @@ import com.google.cloud.genomics.dataflow.readers.VariantReader; import com.google.cloud.genomics.dataflow.utils.GenomicsDatasetOptions; import com.google.cloud.genomics.dataflow.utils.GenomicsOptions; -import com.google.cloud.genomics.utils.GenomicsFactory; +import com.google.cloud.genomics.utils.OfflineAuth; import com.google.cloud.genomics.utils.ShardBoundary; import com.google.cloud.genomics.utils.ShardUtils; @@ -55,7 +55,7 @@ public static void main(String[] args) throws IOException, GeneralSecurityExcept // Option validation is not yet automatic, we make an explicit call here. GenomicsDatasetOptions.Methods.validateOptions(options); - GenomicsFactory.OfflineAuth auth = GenomicsOptions.Methods.getGenomicsAuth(options); + OfflineAuth auth = GenomicsOptions.Methods.getGenomicsAuth(options); List requests = options.isAllReferences() ? 
ShardUtils.getPaginatedVariantRequests(options.getDatasetId(), ShardUtils.SexChromosomeFilter.EXCLUDE_XY, options.getBasesPerShard(), auth) : diff --git a/src/main/java/com/google/cloud/genomics/dataflow/pipelines/VariantSimilarity.java b/src/main/java/com/google/cloud/genomics/dataflow/pipelines/VariantSimilarity.java index 20545c9..0a43332 100644 --- a/src/main/java/com/google/cloud/genomics/dataflow/pipelines/VariantSimilarity.java +++ b/src/main/java/com/google/cloud/genomics/dataflow/pipelines/VariantSimilarity.java @@ -37,8 +37,8 @@ import com.google.cloud.genomics.dataflow.utils.GCSOptions; import com.google.cloud.genomics.dataflow.utils.GenomicsDatasetOptions; import com.google.cloud.genomics.dataflow.utils.GenomicsOptions; -import com.google.cloud.genomics.utils.GenomicsFactory; import com.google.cloud.genomics.utils.GenomicsUtils; +import com.google.cloud.genomics.utils.OfflineAuth; import com.google.cloud.genomics.utils.ShardBoundary; import com.google.cloud.genomics.utils.ShardUtils; import com.google.common.collect.BiMap; @@ -56,8 +56,8 @@ public class VariantSimilarity { = "nextPageToken,variants(start,calls(genotype,callSetName))"; public static interface VariantSimilarityOptions extends GenomicsDatasetOptions, GCSOptions { - @Description("Whether to use the gRPC API endpoint for variants. Defaults to 'false';") - @Default.Boolean(false) + @Description("Whether to use the gRPC API endpoint for variants. Defaults to 'true';") + @Default.Boolean(true) Boolean getUseGrpc(); void setUseGrpc(Boolean value); @@ -71,7 +71,7 @@ public static void main(String[] args) throws IOException, GeneralSecurityExcept // Option validation is not yet automatic, we make an explicit call here. 
GenomicsDatasetOptions.Methods.validateOptions(options); - GenomicsFactory.OfflineAuth auth = GenomicsOptions.Methods.getGenomicsAuth(options); + OfflineAuth auth = GenomicsOptions.Methods.getGenomicsAuth(options); List callSetNames = GenomicsUtils.getCallSetsNames(options.getDatasetId() , auth); Collections.sort(callSetNames); // Ensure a stable sort order for reproducible results. diff --git a/src/main/java/com/google/cloud/genomics/dataflow/pipelines/VerifyBamId.java b/src/main/java/com/google/cloud/genomics/dataflow/pipelines/VerifyBamId.java index 4ac1c7e..1bb9b44 100644 --- a/src/main/java/com/google/cloud/genomics/dataflow/pipelines/VerifyBamId.java +++ b/src/main/java/com/google/cloud/genomics/dataflow/pipelines/VerifyBamId.java @@ -58,8 +58,8 @@ import com.google.cloud.genomics.dataflow.utils.ReadFunctions; import com.google.cloud.genomics.dataflow.utils.Solver; import com.google.cloud.genomics.dataflow.utils.VariantFunctions; -import com.google.cloud.genomics.utils.GenomicsFactory; import com.google.cloud.genomics.utils.GenomicsUtils; +import com.google.cloud.genomics.utils.OfflineAuth; import com.google.cloud.genomics.utils.ShardBoundary; import com.google.cloud.genomics.utils.ShardUtils; import com.google.cloud.genomics.utils.ShardUtils.SexChromosomeFilter; @@ -93,7 +93,7 @@ public class VerifyBamId { private static VerifyBamId.VerifyBamIdOptions options; private static Pipeline p; - private static GenomicsFactory.OfflineAuth auth; + private static OfflineAuth auth; /** * String prefix used for sampling hash function diff --git a/src/main/java/com/google/cloud/genomics/dataflow/readers/GenomicsApiReader.java b/src/main/java/com/google/cloud/genomics/dataflow/readers/GenomicsApiReader.java index 6c9e2b9..8067cd3 100644 --- a/src/main/java/com/google/cloud/genomics/dataflow/readers/GenomicsApiReader.java +++ b/src/main/java/com/google/cloud/genomics/dataflow/readers/GenomicsApiReader.java @@ -15,30 +15,29 @@ */ package 
com.google.cloud.genomics.dataflow.readers; +import java.util.logging.Logger; + import com.google.api.client.json.GenericJson; import com.google.api.services.genomics.Genomics; import com.google.cloud.dataflow.sdk.transforms.Aggregator; import com.google.cloud.dataflow.sdk.transforms.DoFn; import com.google.cloud.dataflow.sdk.transforms.Sum; import com.google.cloud.genomics.utils.GenomicsFactory; - -import java.io.IOException; -import java.security.GeneralSecurityException; -import java.util.logging.Logger; +import com.google.cloud.genomics.utils.OfflineAuth; public abstract class GenomicsApiReader extends DoFn { private static final Logger LOG = Logger.getLogger(GenomicsApiReader.class.getName()); // Used for access to the genomics API - protected final GenomicsFactory.OfflineAuth auth; + protected final OfflineAuth auth; protected final String fields; protected Aggregator initializedRequestsCount; protected Aggregator unsuccessfulResponsesCount; protected Aggregator ioExceptionsCount; protected Aggregator itemCount; - public GenomicsApiReader(GenomicsFactory.OfflineAuth auth, String fields) { + public GenomicsApiReader(OfflineAuth auth, String fields) { this.auth = auth; this.fields = fields; initializedRequestsCount = createAggregator("Genomics API Initialized Request Count", new Sum.SumIntegerFn()); @@ -49,24 +48,18 @@ public GenomicsApiReader(GenomicsFactory.OfflineAuth auth, String fields) { @Override public void processElement(ProcessContext c) { - try { - GenomicsFactory factory = auth.getDefaultFactory(); - processApiCall(auth.getGenomics(factory), c, c.element()); - - initializedRequestsCount.addValue(factory.initializedRequestsCount()); - unsuccessfulResponsesCount.addValue(factory.unsuccessfulResponsesCount()); - ioExceptionsCount.addValue(factory.ioExceptionsCount()); - LOG.info("ApiReader processed " + factory.initializedRequestsCount() + " requests (" - + factory.unsuccessfulResponsesCount() + " server errors and " - + factory.ioExceptionsCount() + " 
IO exceptions)"); + GenomicsFactory factory = GenomicsFactory.builder().build(); + Genomics genomics = factory.fromOfflineAuth(auth); + processApiCall(genomics, c, c.element()); - } catch (IOException | GeneralSecurityException e) { - throw new RuntimeException( - "Failed to create genomics API request - this shouldn't happen.", e); - } + initializedRequestsCount.addValue(factory.initializedRequestsCount()); + unsuccessfulResponsesCount.addValue(factory.unsuccessfulResponsesCount()); + ioExceptionsCount.addValue(factory.ioExceptionsCount()); + LOG.info("ApiReader processed " + factory.initializedRequestsCount() + " requests (" + + factory.unsuccessfulResponsesCount() + " server errors and " + + factory.ioExceptionsCount() + " IO exceptions)"); } - protected abstract void processApiCall(Genomics genomics, ProcessContext c, I element) - throws IOException; + protected abstract void processApiCall(Genomics genomics, ProcessContext c, I element); } diff --git a/src/main/java/com/google/cloud/genomics/dataflow/readers/ReadGroupStreamer.java b/src/main/java/com/google/cloud/genomics/dataflow/readers/ReadGroupStreamer.java index 4d87d31..3ece5c9 100644 --- a/src/main/java/com/google/cloud/genomics/dataflow/readers/ReadGroupStreamer.java +++ b/src/main/java/com/google/cloud/genomics/dataflow/readers/ReadGroupStreamer.java @@ -21,7 +21,7 @@ import com.google.cloud.dataflow.sdk.transforms.ParDo; import com.google.cloud.dataflow.sdk.values.PCollection; import com.google.cloud.genomics.dataflow.utils.GenomicsDatasetOptions; -import com.google.cloud.genomics.utils.GenomicsFactory; +import com.google.cloud.genomics.utils.OfflineAuth; import com.google.cloud.genomics.utils.ShardBoundary; import com.google.cloud.genomics.utils.ShardUtils; import com.google.cloud.genomics.utils.ShardUtils.SexChromosomeFilter; @@ -36,7 +36,7 @@ * potentially be larger than Dataflow's pipeline creation request size limit. 
*/ public class ReadGroupStreamer extends PTransform, PCollection> { - protected final GenomicsFactory.OfflineAuth auth; + protected final OfflineAuth auth; protected final ShardBoundary.Requirement shardBoundary; protected final String fields; protected final SexChromosomeFilter sexChromosomeFilter; @@ -50,7 +50,7 @@ public class ReadGroupStreamer extends PTransform, PCollecti * @param sexChromosomeFilter An enum value indicating how sex chromosomes should be * handled in the result. */ - public ReadGroupStreamer(GenomicsFactory.OfflineAuth auth, ShardBoundary.Requirement shardBoundary, + public ReadGroupStreamer(OfflineAuth auth, ShardBoundary.Requirement shardBoundary, String fields, SexChromosomeFilter sexChromosomeFilter) { this.auth = auth; this.shardBoundary = shardBoundary; diff --git a/src/main/java/com/google/cloud/genomics/dataflow/readers/ReadReader.java b/src/main/java/com/google/cloud/genomics/dataflow/readers/ReadReader.java index dd97909..ce5883c 100644 --- a/src/main/java/com/google/cloud/genomics/dataflow/readers/ReadReader.java +++ b/src/main/java/com/google/cloud/genomics/dataflow/readers/ReadReader.java @@ -16,14 +16,13 @@ package com.google.cloud.genomics.dataflow.readers; -import java.io.IOException; import java.util.logging.Logger; import com.google.api.services.genomics.Genomics; import com.google.api.services.genomics.model.Read; import com.google.api.services.genomics.model.SearchReadsRequest; import com.google.cloud.genomics.dataflow.utils.GenomicsOptions; -import com.google.cloud.genomics.utils.GenomicsFactory; +import com.google.cloud.genomics.utils.OfflineAuth; import com.google.cloud.genomics.utils.Paginator; import com.google.cloud.genomics.utils.ShardBoundary; @@ -44,7 +43,7 @@ public class ReadReader extends GenomicsApiReader { * @param auth Auth class containing credentials. * @param readFields Fields to return in responses. 
*/ - public ReadReader(GenomicsFactory.OfflineAuth auth, ShardBoundary.Requirement shardBoundary, String readFields) { + public ReadReader(OfflineAuth auth, ShardBoundary.Requirement shardBoundary, String readFields) { super(auth, readFields); this.shardBoundary = shardBoundary; } @@ -53,13 +52,12 @@ public ReadReader(GenomicsFactory.OfflineAuth auth, ShardBoundary.Requirement sh * Create a ReadReader with no fields parameter, all information will be returned. * @param auth Auth class containing credentials. */ - public ReadReader(GenomicsFactory.OfflineAuth auth, ShardBoundary.Requirement shardBoundary) { + public ReadReader(OfflineAuth auth, ShardBoundary.Requirement shardBoundary) { this(auth, shardBoundary, null); } @Override - protected void processApiCall(Genomics genomics, ProcessContext c, SearchReadsRequest request) - throws IOException { + protected void processApiCall(Genomics genomics, ProcessContext c, SearchReadsRequest request) { LOG.info("Starting Reads read loop"); GenomicsOptions options = c.getPipelineOptions().as(GenomicsOptions.class); diff --git a/src/main/java/com/google/cloud/genomics/dataflow/readers/ReadStreamer.java b/src/main/java/com/google/cloud/genomics/dataflow/readers/ReadStreamer.java index 109fabd..3dcdeb8 100644 --- a/src/main/java/com/google/cloud/genomics/dataflow/readers/ReadStreamer.java +++ b/src/main/java/com/google/cloud/genomics/dataflow/readers/ReadStreamer.java @@ -26,7 +26,7 @@ import com.google.cloud.dataflow.sdk.transforms.ParDo; import com.google.cloud.dataflow.sdk.transforms.Sum; import com.google.cloud.dataflow.sdk.values.PCollection; -import com.google.cloud.genomics.utils.GenomicsFactory; +import com.google.cloud.genomics.utils.OfflineAuth; import com.google.cloud.genomics.utils.ShardBoundary; import com.google.cloud.genomics.utils.grpc.ReadStreamIterator; import com.google.common.base.Stopwatch; @@ -40,7 +40,7 @@ public class ReadStreamer extends PTransform, PCollection> { - protected final 
GenomicsFactory.OfflineAuth auth; + protected final OfflineAuth auth; protected final ShardBoundary.Requirement shardBoundary; protected final String fields; @@ -51,7 +51,7 @@ public class ReadStreamer extends * @param shardBoundary The shard boundary semantics to enforce. * @param fields Which fields to include in a partial response or null for all. */ - public ReadStreamer(GenomicsFactory.OfflineAuth auth, ShardBoundary.Requirement shardBoundary, String fields) { + public ReadStreamer(OfflineAuth auth, ShardBoundary.Requirement shardBoundary, String fields) { this.auth = auth; this.shardBoundary = shardBoundary; this.fields = fields; diff --git a/src/main/java/com/google/cloud/genomics/dataflow/readers/VariantReader.java b/src/main/java/com/google/cloud/genomics/dataflow/readers/VariantReader.java index f7f6b6a..4823dcf 100644 --- a/src/main/java/com/google/cloud/genomics/dataflow/readers/VariantReader.java +++ b/src/main/java/com/google/cloud/genomics/dataflow/readers/VariantReader.java @@ -15,14 +15,13 @@ */ package com.google.cloud.genomics.dataflow.readers; -import java.io.IOException; import java.util.logging.Logger; import com.google.api.services.genomics.Genomics; import com.google.api.services.genomics.model.SearchVariantsRequest; import com.google.api.services.genomics.model.Variant; import com.google.cloud.genomics.dataflow.utils.GenomicsDatasetOptions; -import com.google.cloud.genomics.utils.GenomicsFactory; +import com.google.cloud.genomics.utils.OfflineAuth; import com.google.cloud.genomics.utils.Paginator; import com.google.cloud.genomics.utils.ShardBoundary; @@ -37,7 +36,7 @@ public class VariantReader extends GenomicsApiReader 0) { diff --git a/src/main/java/com/google/cloud/genomics/dataflow/readers/VariantStreamer.java b/src/main/java/com/google/cloud/genomics/dataflow/readers/VariantStreamer.java index 8a56bfc..8b88f32 100644 --- a/src/main/java/com/google/cloud/genomics/dataflow/readers/VariantStreamer.java +++ 
b/src/main/java/com/google/cloud/genomics/dataflow/readers/VariantStreamer.java @@ -30,7 +30,7 @@ import com.google.cloud.dataflow.sdk.transforms.ParDo; import com.google.cloud.dataflow.sdk.transforms.Sum; import com.google.cloud.dataflow.sdk.values.PCollection; -import com.google.cloud.genomics.utils.GenomicsFactory; +import com.google.cloud.genomics.utils.OfflineAuth; import com.google.cloud.genomics.utils.ShardBoundary; import com.google.cloud.genomics.utils.grpc.VariantStreamIterator; import com.google.common.base.Stopwatch; @@ -45,7 +45,7 @@ public class VariantStreamer extends PTransform, PCollection> { private static final Logger LOG = LoggerFactory.getLogger(VariantStreamer.class); - protected final GenomicsFactory.OfflineAuth auth; + protected final OfflineAuth auth; protected final ShardBoundary.Requirement shardBoundary; protected final String fields; @@ -56,7 +56,7 @@ public class VariantStreamer extends * @param shardBoundary The shard boundary semantics to enforce. * @param fields Which fields to include in a partial response or null for all. 
*/ - public VariantStreamer(GenomicsFactory.OfflineAuth auth, ShardBoundary.Requirement shardBoundary, String fields) { + public VariantStreamer(OfflineAuth auth, ShardBoundary.Requirement shardBoundary, String fields) { this.auth = auth; this.shardBoundary = shardBoundary; this.fields = fields; diff --git a/src/main/java/com/google/cloud/genomics/dataflow/readers/bam/ReadBAMTransform.java b/src/main/java/com/google/cloud/genomics/dataflow/readers/bam/ReadBAMTransform.java index 4f8b648..05156c4 100644 --- a/src/main/java/com/google/cloud/genomics/dataflow/readers/bam/ReadBAMTransform.java +++ b/src/main/java/com/google/cloud/genomics/dataflow/readers/bam/ReadBAMTransform.java @@ -34,7 +34,7 @@ import com.google.cloud.dataflow.sdk.values.TupleTag; import com.google.cloud.genomics.dataflow.utils.GCSOptions; import com.google.cloud.genomics.utils.Contig; -import com.google.cloud.genomics.utils.GenomicsFactory; +import com.google.cloud.genomics.utils.OfflineAuth; import htsjdk.samtools.ValidationStringency; @@ -47,11 +47,11 @@ * a collection of reads by reading BAM files in a sharded manner. 
*/ public class ReadBAMTransform extends PTransform, PCollection> { - GenomicsFactory.OfflineAuth auth; + OfflineAuth auth; ReaderOptions options; public static class ReadFn extends DoFn { - GenomicsFactory.OfflineAuth auth; + OfflineAuth auth; Storage.Objects storage; ReaderOptions options; Aggregator recordCountAggregator; @@ -60,7 +60,7 @@ public static class ReadFn extends DoFn { Aggregator skippedEndCountAggregator; Aggregator skippedRefMismatchAggregator; - public ReadFn(GenomicsFactory.OfflineAuth auth, ReaderOptions options) { + public ReadFn(OfflineAuth auth, ReaderOptions options) { this.auth = auth; this.options = options; recordCountAggregator = createAggregator("Processed records", new SumIntegerFn()); @@ -92,7 +92,7 @@ public void processElement(ProcessContext c) throws java.lang.Exception { public static PCollection getReadsFromBAMFilesSharded( Pipeline p, - GenomicsFactory.OfflineAuth auth, + OfflineAuth auth, Iterable contigs, ReaderOptions options, String BAMFile, @@ -122,11 +122,11 @@ public PCollection apply(PCollection shards) { return reads; } - public GenomicsFactory.OfflineAuth getAuth() { + public OfflineAuth getAuth() { return auth; } - public void setAuth(GenomicsFactory.OfflineAuth auth) { + public void setAuth(OfflineAuth auth) { this.auth = auth; } diff --git a/src/main/java/com/google/cloud/genomics/dataflow/utils/GCSHelper.java b/src/main/java/com/google/cloud/genomics/dataflow/utils/GCSHelper.java index 8921112..73092cf 100644 --- a/src/main/java/com/google/cloud/genomics/dataflow/utils/GCSHelper.java +++ b/src/main/java/com/google/cloud/genomics/dataflow/utils/GCSHelper.java @@ -15,17 +15,16 @@ */ package com.google.cloud.genomics.dataflow.utils; -import static com.google.api.services.storage.StorageScopes.DEVSTORAGE_READ_ONLY; - +import com.google.api.client.auth.oauth2.Credential; import com.google.api.client.http.HttpHeaders; import com.google.api.client.http.HttpTransport; import com.google.api.client.json.JsonFactory; import 
com.google.api.client.json.jackson2.JacksonFactory; -import com.google.api.services.genomics.GenomicsScopes; import com.google.api.services.storage.Storage; -import com.google.api.services.storage.StorageScopes; import com.google.api.services.storage.model.StorageObject; +import com.google.cloud.genomics.utils.CredentialFactory; import com.google.cloud.genomics.utils.GenomicsFactory; +import com.google.cloud.genomics.utils.OfflineAuth; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; @@ -71,14 +70,12 @@ public GCSHelper(GenomicsOptions popts) throws GeneralSecurityException, IOExcep Preconditions.checkNotNull(popts); // set up storage object GenomicsFactory factory = GenomicsFactory.builder(popts.getAppName()) - .setNumberOfRetries(popts.getNumberOfRetries()) - .setScopes(Lists.newArrayList(StorageScopes.DEVSTORAGE_READ_ONLY, GenomicsScopes.GENOMICS)) .build(); httpTransport = factory.getHttpTransport(); Storage.Builder builder = new Storage.Builder(httpTransport, JSON_FACTORY, null) .setApplicationName(popts.getAppName()); - GenomicsFactory.OfflineAuth auth = GenomicsOptions.Methods.getGenomicsAuth(popts); - storage = auth.setupAuthentication(factory, builder).build(); + OfflineAuth auth = GenomicsOptions.Methods.getGenomicsAuth(popts); + storage = factory.fromOfflineAuth(builder, auth).build(); } /** @@ -87,17 +84,13 @@ public GCSHelper(GenomicsOptions popts) throws GeneralSecurityException, IOExcep * * @param offlineAuth serialized credentials */ - public GCSHelper(GenomicsFactory.OfflineAuth offlineAuth) throws GeneralSecurityException, IOException { + public GCSHelper(OfflineAuth offlineAuth) throws GeneralSecurityException, IOException { Preconditions.checkNotNull(offlineAuth); - String appName = offlineAuth.applicationName; // set up storage object - GenomicsFactory factory = GenomicsFactory.builder(appName) - 
.setScopes(Lists.newArrayList(StorageScopes.DEVSTORAGE_READ_ONLY, GenomicsScopes.GENOMICS)) - .build(); + GenomicsFactory factory = GenomicsFactory.builder().build(); httpTransport = factory.getHttpTransport(); - Storage.Builder builder = new Storage.Builder(httpTransport, JSON_FACTORY, null) - .setApplicationName(appName); - storage = offlineAuth.setupAuthentication(factory, builder).build(); + Storage.Builder builder = new Storage.Builder(httpTransport, JSON_FACTORY, null); + storage = factory.fromOfflineAuth(builder, offlineAuth).build(); } /** @@ -108,14 +101,12 @@ public GCSHelper(GenomicsFactory.OfflineAuth offlineAuth) throws GeneralSecurity */ public GCSHelper(String appName, String secretsFile) throws GeneralSecurityException, IOException { // cf https://groups.google.com/forum/#!msg/google-genomics-discuss/P9A9odUXwaM/ISdIzOXNS3YJ - GenomicsFactory factory = GenomicsFactory.builder(appName) - .setScopes(Lists.newArrayList(DEVSTORAGE_READ_ONLY, GenomicsScopes.GENOMICS)) - .build(); + GenomicsFactory factory = GenomicsFactory.builder(appName).build(); httpTransport = factory.getHttpTransport(); - GenomicsFactory.OfflineAuth offlineAuth = factory.getOfflineAuthFromClientSecretsFile(secretsFile); + Credential creds = CredentialFactory.getCredentialFromClientSecrets(secretsFile, appName); Storage.Builder builder = new Storage.Builder(httpTransport, JSON_FACTORY, null) .setApplicationName(appName); - storage = offlineAuth.setupAuthentication(factory, builder).build(); + storage = factory.fromCredential(builder, creds).build(); } @VisibleForTesting diff --git a/src/main/java/com/google/cloud/genomics/dataflow/utils/GCSOptions.java b/src/main/java/com/google/cloud/genomics/dataflow/utils/GCSOptions.java index 7c39e62..bedbda8 100644 --- a/src/main/java/com/google/cloud/genomics/dataflow/utils/GCSOptions.java +++ b/src/main/java/com/google/cloud/genomics/dataflow/utils/GCSOptions.java @@ -15,6 +15,10 @@ */ package com.google.cloud.genomics.dataflow.utils; +import 
java.util.List; +import java.util.logging.Logger; + +import com.fasterxml.jackson.annotation.JsonIgnore; import com.google.api.client.googleapis.util.Utils; import com.google.api.client.http.HttpTransport; import com.google.api.client.json.JsonFactory; @@ -27,13 +31,8 @@ import com.google.cloud.dataflow.sdk.options.PipelineOptions; import com.google.cloud.dataflow.sdk.transforms.DoFn; import com.google.cloud.genomics.utils.GenomicsFactory; +import com.google.cloud.genomics.utils.OfflineAuth; import com.google.common.collect.ImmutableList; -import com.fasterxml.jackson.annotation.JsonIgnore; - -import java.io.IOException; -import java.security.GeneralSecurityException; -import java.util.List; -import java.util.logging.Logger; /** * Options for pipelines that need to access GCS storage. @@ -113,30 +112,28 @@ class Methods { private Methods() { } - public static GenomicsFactory.OfflineAuth createGCSAuth(GCSOptions options) - throws IOException, GeneralSecurityException { + public static OfflineAuth createGCSAuth(GCSOptions options) { return GenomicsOptions.Methods.getGenomicsAuth(options); } public static Storage.Objects createStorageClient( - DoFn.Context context, GenomicsFactory.OfflineAuth auth) throws IOException { + DoFn.Context context, OfflineAuth auth) { final GCSOptions gcsOptions = context.getPipelineOptions().as(GCSOptions.class); return createStorageClient(gcsOptions, auth); } public static Storage.Objects createStorageClient(GCSOptions gcsOptions, - GenomicsFactory.OfflineAuth auth) throws IOException { - LOG.info("Creating storgae client for " + auth.applicationName); + OfflineAuth auth) { final Storage.Builder storageBuilder = new Storage.Builder( gcsOptions.getTransport(), gcsOptions.getJsonFactory(), null); - return auth - .setupAuthentication(gcsOptions.getGenomicsFactory(), storageBuilder) + return gcsOptions.getGenomicsFactory() + .fromOfflineAuth(storageBuilder, auth) .build() - .objects(); + .objects(); } } diff --git 
a/src/main/java/com/google/cloud/genomics/dataflow/utils/GenomicsDatasetOptions.java b/src/main/java/com/google/cloud/genomics/dataflow/utils/GenomicsDatasetOptions.java index 3a0f47e..22a2f97 100644 --- a/src/main/java/com/google/cloud/genomics/dataflow/utils/GenomicsDatasetOptions.java +++ b/src/main/java/com/google/cloud/genomics/dataflow/utils/GenomicsDatasetOptions.java @@ -39,7 +39,6 @@ public static void validateOptions(GenomicsDatasetOptions options) { Preconditions.checkState(false, "output must be a valid Google Cloud Storage URL (starting with gs://)"); } } - GenomicsOptions.Methods.validateOptions(options); } } diff --git a/src/main/java/com/google/cloud/genomics/dataflow/utils/GenomicsOptions.java b/src/main/java/com/google/cloud/genomics/dataflow/utils/GenomicsOptions.java index ce49eac..d38d711 100644 --- a/src/main/java/com/google/cloud/genomics/dataflow/utils/GenomicsOptions.java +++ b/src/main/java/com/google/cloud/genomics/dataflow/utils/GenomicsOptions.java @@ -13,57 +13,58 @@ */ package com.google.cloud.genomics.dataflow.utils; -import java.io.IOException; -import java.security.GeneralSecurityException; +import java.util.Scanner; import com.google.cloud.dataflow.sdk.options.Default; import com.google.cloud.dataflow.sdk.options.Description; import com.google.cloud.dataflow.sdk.options.GcsOptions; -import com.google.cloud.genomics.utils.GenomicsFactory; -import com.google.cloud.genomics.utils.GenomicsFactory.Builder; +import com.google.cloud.dataflow.sdk.runners.DirectPipelineRunner; +import com.google.cloud.genomics.utils.OfflineAuth; /** - * Contains common genomics pipeline options. Extend this class to add additional command line args. - * - * Note: All methods defined in this class will be called during command line parsing unless it is - * annotated with a @JsonIgnore annotation. + * Contains pipeline options relevant to the creation of Genomics API clients. 
*/ public interface GenomicsOptions extends GcsOptions { public static class Methods { - public static GenomicsFactory.OfflineAuth getGenomicsAuth(GenomicsOptions options) throws GeneralSecurityException, IOException { - Builder builder = - GenomicsFactory.builder(options.getAppName()).setNumberOfRetries(options.getNumberOfRetries()); + public static OfflineAuth getGenomicsAuth(GenomicsOptions options) { + if (DirectPipelineRunner.class != options.getRunner() + && null != options.getSecretsFile() + && options.getWarnUserCredential()) { + System.out.println("\nThis pipeline will run on GCE VMs and your user credential will" + + " be used by all Dataflow worker instances. Your credentials may be visible to" + + " others with access to the VMs."); - String secretsFile = options.getSecretsFile(), apiKey = options.getApiKey(); - if (secretsFile == null && apiKey == null) { - throw new IllegalArgumentException( - "Need to specify either --secretsFile or --apiKey"); + System.out.println("Do you want to continue (Y/n)?"); + Scanner kbd = new Scanner(System.in); + String decision; + decision = kbd.nextLine(); + switch(decision) { + case "yes": case "Yes": case "YES": case "y": case "Y": + break; + default: + System.exit(0); + } } - if(null != secretsFile) { - return builder.build().getOfflineAuthFromCredential(options.getGcpCredential(), - secretsFile); + if (null != options.getSecretsFile()) { + // User credential will be available on all Dataflow workers. + return new OfflineAuth(options.getGcpCredential()); } - return builder.build().getOfflineAuthFromApiKey(apiKey); - } - - public static void validateOptions(GenomicsOptions options) { + // This "empty" OfflineAuth will default to the Application + // Default Credential available from wherever it is + // accessed (e.g., locally or on GCE). 
+ return new OfflineAuth(); } } - @Description("If querying a public dataset, provide a Google API key that has access " - + "to genomics data and no OAuth will be performed.") - String getApiKey(); - - void setApiKey(String apiKey); - - @Description("Specifies the maximum number of retries to attempt (if needed) for requests to the Genomics API.") - @Default.Integer(10) - int getNumberOfRetries(); + @Description("Set this option to 'false' to disable the yes/no prompt when running" + + " the pipeline with a user credential.") + @Default.Boolean(true) + boolean getWarnUserCredential(); - void setNumberOfRetries(int numOfRetries); + void setWarnUserCredential(boolean warnUserCredential); @Description("Specifies number of results to return in a single page of results. " + "If unspecified, the default page size for the Genomics API is used.") diff --git a/src/test/java/com/google/cloud/genomics/dataflow/functions/AlleleSimilarityCalculatorTest.java b/src/test/java/com/google/cloud/genomics/dataflow/functions/AlleleSimilarityCalculatorTest.java index e6f9fe9..0da2973 100644 --- a/src/test/java/com/google/cloud/genomics/dataflow/functions/AlleleSimilarityCalculatorTest.java +++ b/src/test/java/com/google/cloud/genomics/dataflow/functions/AlleleSimilarityCalculatorTest.java @@ -20,27 +20,41 @@ import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; -import java.util.Collections; import java.util.List; import org.hamcrest.CoreMatchers; import org.junit.Test; -import com.google.api.services.genomics.model.Variant; import com.google.cloud.dataflow.sdk.transforms.DoFnTester; import com.google.cloud.dataflow.sdk.values.KV; import com.google.cloud.genomics.dataflow.utils.DataUtils; +import com.google.genomics.v1.Variant; public class AlleleSimilarityCalculatorTest { - static final Variant snp1 = DataUtils.makeVariant("chr7", 200019, 200020, "T", Collections.singletonList("G"), - DataUtils.makeCall("het-alt sample", 1, 0), 
DataUtils.makeCall("hom-alt sample", 1, 1), - DataUtils.makeCall("hom-ref sample", 0, 0), DataUtils.makeCall("hom-nocall sample", -1, -1), - DataUtils.makeCall("ref-nocall sample", -1, 0)); + static final Variant snp1 = Variant.newBuilder() + .setReferenceName("chr7") + .setStart(200019) + .setEnd(200020) + .setReferenceBases("T") + .addAlternateBases("G") + .addCalls(DataUtils.makeVariantCall("het-alt sample", 1, 0)) + .addCalls(DataUtils.makeVariantCall("hom-alt sample", 1, 1)) + .addCalls(DataUtils.makeVariantCall("hom-ref sample", 0, 0)) + .addCalls(DataUtils.makeVariantCall("hom-nocall sample", -1, -1)) + .addCalls(DataUtils.makeVariantCall("ref-nocall sample", -1, 0)) + .build(); - static final Variant snp2 = DataUtils.makeVariant("chr7", 200020, 200021, "C", Collections.singletonList("A"), - DataUtils.makeCall("hom-alt sample", 1, 1), DataUtils.makeCall("het-alt sample", 0, 1), - DataUtils.makeCall("ref-nocall sample", 0, -1)); + static final Variant snp2 = Variant.newBuilder() + .setReferenceName("chr7") + .setStart(200020) + .setEnd(200021) + .setReferenceBases("C") + .addAlternateBases("A") + .addCalls(DataUtils.makeVariantCall("hom-alt sample", 1, 1)) + .addCalls(DataUtils.makeVariantCall("het-alt sample", 0, 1)) + .addCalls(DataUtils.makeVariantCall("ref-nocall sample", 0, -1)) + .build(); @Test public void testIsReferenceMajor() { diff --git a/src/test/java/com/google/cloud/genomics/dataflow/functions/CallSimilarityCalculatorTest.java b/src/test/java/com/google/cloud/genomics/dataflow/functions/CallSimilarityCalculatorTest.java index 3032e95..df54992 100644 --- a/src/test/java/com/google/cloud/genomics/dataflow/functions/CallSimilarityCalculatorTest.java +++ b/src/test/java/com/google/cloud/genomics/dataflow/functions/CallSimilarityCalculatorTest.java @@ -29,18 +29,18 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import com.google.api.services.genomics.model.Call; -import com.google.api.services.genomics.model.Variant; import 
com.google.cloud.dataflow.sdk.transforms.DoFnTester; import com.google.cloud.dataflow.sdk.values.KV; import com.google.cloud.genomics.dataflow.utils.DataUtils; +import com.google.genomics.v1.Variant; +import com.google.genomics.v1.VariantCall; @RunWith(JUnit4.class) public class CallSimilarityCalculatorTest { private static final double DELTA = 1e-6; - private List calls = newArrayList(); + private List calls = newArrayList(); private List variants = newArrayList(); @@ -52,20 +52,20 @@ public class CallSimilarityCalculatorTest { @Before public void setUp() { - calls.add(DataUtils.makeCall(H1, 0, 0)); - calls.add(DataUtils.makeCall(H2, 1, 0)); - calls.add(DataUtils.makeCall(H3, 0, 1)); - calls.add(DataUtils.makeCall(H2, 1, 1)); - calls.add(DataUtils.makeCall(H3, 1, 1)); - calls.add(DataUtils.makeCall(H2, 1)); - calls.add(DataUtils.makeCall(H3, 0)); - calls.add(DataUtils.makeCall(H2, 1, 0, 1)); - calls.add(DataUtils.makeCall(H3, 1, 0, 0)); - - variants.add(DataUtils.makeSimpleVariant(calls.get(0), calls.get(1), calls.get(2))); - variants.add(DataUtils.makeSimpleVariant(calls.get(0), calls.get(3), calls.get(4))); - variants.add(DataUtils.makeSimpleVariant(calls.get(0), calls.get(5), calls.get(6))); - variants.add(DataUtils.makeSimpleVariant(calls.get(0), calls.get(7), calls.get(8))); + calls.add(DataUtils.makeVariantCall(H1, 0, 0)); + calls.add(DataUtils.makeVariantCall(H2, 1, 0)); + calls.add(DataUtils.makeVariantCall(H3, 0, 1)); + calls.add(DataUtils.makeVariantCall(H2, 1, 1)); + calls.add(DataUtils.makeVariantCall(H3, 1, 1)); + calls.add(DataUtils.makeVariantCall(H2, 1)); + calls.add(DataUtils.makeVariantCall(H3, 0)); + calls.add(DataUtils.makeVariantCall(H2, 1, 0, 1)); + calls.add(DataUtils.makeVariantCall(H3, 1, 0, 0)); + + variants.add(Variant.newBuilder().addCalls(calls.get(0)).addCalls(calls.get(1)).addCalls(calls.get(2)).build()); + variants.add(Variant.newBuilder().addCalls(calls.get(0)).addCalls(calls.get(3)).addCalls(calls.get(4)).build()); + 
variants.add(Variant.newBuilder().addCalls(calls.get(0)).addCalls(calls.get(5)).addCalls(calls.get(6)).build()); + variants.add(Variant.newBuilder().addCalls(calls.get(0)).addCalls(calls.get(7)).addCalls(calls.get(8)).build()); } @Test diff --git a/src/test/java/com/google/cloud/genomics/dataflow/pipelines/CountReadsITCase.java b/src/test/java/com/google/cloud/genomics/dataflow/pipelines/CountReadsITCase.java index 0a021ed..c75d187 100644 --- a/src/test/java/com/google/cloud/genomics/dataflow/pipelines/CountReadsITCase.java +++ b/src/test/java/com/google/cloud/genomics/dataflow/pipelines/CountReadsITCase.java @@ -75,7 +75,6 @@ private void testLocalBase(String outputFilename, String contig, String bamFilen boolean includeUnmapped) throws Exception { final String OUTPUT = helper.getTestOutputGcsFolder()+ outputFilename; String[] ARGS = { - "--apiKey=" + helper.getApiKey(), "--output=" + OUTPUT, "--references=" + contig, "--includeUnmapped=" + includeUnmapped, @@ -126,7 +125,6 @@ public void testLocalNA12877_S1_UNMAPPED() throws Exception { private void testCloudBase(String outputFilename, String contig, String bamFilename, long expectedCount) throws Exception { final String OUTPUT = helper.getTestOutputGcsFolder() + outputFilename; String[] ARGS = { - "--apiKey=" + helper.getApiKey(), "--project=" + helper.getTestProject(), "--output=" + OUTPUT, "--numWorkers=2", @@ -167,7 +165,6 @@ public void testCloudNA12877_S1() throws Exception { public void testCloudWithAPIBase(String outputFilename, String contig, String readGroupSetId, long expectedCount) throws Exception { final String OUTPUT = helper.getTestOutputGcsFolder() + outputFilename; String[] ARGS = { - "--apiKey=" + helper.getApiKey(), "--project=" + helper.getTestProject(), "--output=" + OUTPUT, "--numWorkers=2", diff --git a/src/test/java/com/google/cloud/genomics/dataflow/pipelines/IntegrationTestHelper.java b/src/test/java/com/google/cloud/genomics/dataflow/pipelines/IntegrationTestHelper.java index 
45365ac..92bfa62 100644 --- a/src/test/java/com/google/cloud/genomics/dataflow/pipelines/IntegrationTestHelper.java +++ b/src/test/java/com/google/cloud/genomics/dataflow/pipelines/IntegrationTestHelper.java @@ -17,6 +17,8 @@ import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; +import htsjdk.samtools.SamReader; +import htsjdk.samtools.ValidationStringency; import java.io.BufferedReader; import java.io.IOException; @@ -33,13 +35,9 @@ import com.google.cloud.genomics.dataflow.readers.bam.BAMIO; import com.google.cloud.genomics.dataflow.utils.GenomicsOptions; -import htsjdk.samtools.SamReader; -import htsjdk.samtools.ValidationStringency; - public class IntegrationTestHelper { // Test configuration constants - private final String API_KEY = System.getenv("GOOGLE_API_KEY"); private final String TEST_PROJECT = System.getenv("TEST_PROJECT"); private final String TEST_OUTPUT_GCS_FOLDER = System.getenv("TEST_OUTPUT_GCS_FOLDER"); private final String TEST_STAGING_GCS_FOLDER = System.getenv("TEST_STAGING_GCS_FOLDER"); @@ -53,7 +51,6 @@ public class IntegrationTestHelper { GcsUtil gcsUtil; public IntegrationTestHelper() { - assertNotNull("You must set the GOOGLE_API_KEY environment variable for this test.", API_KEY); assertNotNull("You must set the TEST_PROJECT environment variable for this test.", TEST_PROJECT); assertNotNull("You must set the TEST_OUTPUT_GCS_FOLDER environment variable for this test.", TEST_OUTPUT_GCS_FOLDER); assertNotNull("You must set the TEST_STAGING_GCS_FOLDER environment variable for this test.", TEST_STAGING_GCS_FOLDER); @@ -62,17 +59,9 @@ public IntegrationTestHelper() { assertTrue("TEST_STAGING_GCS_FOLDER must start with 'gs://'", TEST_STAGING_GCS_FOLDER.startsWith("gs://")); // we don't care how TEST_STAGING_GCS_FOLDER ends, so no check for it. 
- popts.setApiKey(API_KEY); gcsUtil = new GcsUtil.GcsUtilFactory().create(popts); } - /** - * @return the API_KEY - */ - public String getApiKey() { - return API_KEY; - } - /** * @return the TEST_PROJECT */ diff --git a/src/test/java/com/google/cloud/genomics/dataflow/pipelines/ShardedBAMWritingITCase.java b/src/test/java/com/google/cloud/genomics/dataflow/pipelines/ShardedBAMWritingITCase.java index e1aa475..c7d4f6d 100644 --- a/src/test/java/com/google/cloud/genomics/dataflow/pipelines/ShardedBAMWritingITCase.java +++ b/src/test/java/com/google/cloud/genomics/dataflow/pipelines/ShardedBAMWritingITCase.java @@ -63,7 +63,6 @@ public static void setUpBeforeClass() { public void testShardedWriting() throws Exception { final String OUTPUT = helper.getTestOutputGcsFolder() + OUTPUT_FNAME; String[] ARGS = { - "--apiKey=" + helper.getApiKey(), "--project=" + helper.getTestProject(), "--output=" + OUTPUT, "--numWorkers=18", diff --git a/src/test/java/com/google/cloud/genomics/dataflow/pipelines/VariantSimilarityITCase.java b/src/test/java/com/google/cloud/genomics/dataflow/pipelines/VariantSimilarityITCase.java index 91c5146..167ece5 100644 --- a/src/test/java/com/google/cloud/genomics/dataflow/pipelines/VariantSimilarityITCase.java +++ b/src/test/java/com/google/cloud/genomics/dataflow/pipelines/VariantSimilarityITCase.java @@ -25,9 +25,7 @@ import org.hamcrest.CoreMatchers; import org.junit.After; -import org.junit.Before; import org.junit.BeforeClass; -import org.junit.Ignore; import org.junit.Test; import com.google.api.client.util.Lists; @@ -94,7 +92,6 @@ public void tearDown() throws Exception { @Test public void testPaginatedLocal() throws IOException, GeneralSecurityException { String[] ARGS = { - "--apiKey=" + helper.getApiKey(), "--references=" + helper.PLATINUM_GENOMES_BRCA1_REFERENCES, "--datasetId=" + helper.PLATINUM_GENOMES_DATASET, "--output=" + outputPrefix, @@ -106,7 +103,6 @@ public void testPaginatedLocal() throws IOException, GeneralSecurityException { 
@Test public void testPaginatedCloud() throws IOException, GeneralSecurityException { String[] ARGS = { - "--apiKey=" + helper.getApiKey(), "--references=" + helper.PLATINUM_GENOMES_BRCA1_REFERENCES, "--datasetId=" + helper.PLATINUM_GENOMES_DATASET, "--output=" + outputPrefix, @@ -118,13 +114,9 @@ public void testPaginatedCloud() throws IOException, GeneralSecurityException { testBase(ARGS); } - @Ignore - // TODO enable this test. For it to work, we'll need to add alpn to the classpath - // and figure out https://github.com/googlegenomics/dataflow-java/issues/119 @Test public void testStreamingLocal() throws IOException, GeneralSecurityException { String[] ARGS = { - "--apiKey=" + helper.getApiKey(), "--references=" + helper.PLATINUM_GENOMES_BRCA1_REFERENCES, "--datasetId=" + helper.PLATINUM_GENOMES_DATASET, "--output=" + outputPrefix, @@ -136,7 +128,6 @@ public void testStreamingLocal() throws IOException, GeneralSecurityException { @Test public void testStreamingCloud() throws IOException, GeneralSecurityException { String[] ARGS = { - "--apiKey=" + helper.getApiKey(), "--references=" + helper.PLATINUM_GENOMES_BRCA1_REFERENCES, "--datasetId=" + helper.PLATINUM_GENOMES_DATASET, "--output=" + outputPrefix, diff --git a/src/test/java/com/google/cloud/genomics/dataflow/readers/bam/BAMIOITCase.java b/src/test/java/com/google/cloud/genomics/dataflow/readers/bam/BAMIOITCase.java index 66f0e54..87509d6 100644 --- a/src/test/java/com/google/cloud/genomics/dataflow/readers/bam/BAMIOITCase.java +++ b/src/test/java/com/google/cloud/genomics/dataflow/readers/bam/BAMIOITCase.java @@ -15,31 +15,22 @@ */ package com.google.cloud.genomics.dataflow.readers.bam; -import com.google.api.services.genomics.model.Read; -import com.google.api.services.storage.Storage; -import com.google.cloud.dataflow.sdk.options.PipelineOptionsFactory; -import com.google.cloud.dataflow.sdk.util.Transport; -import com.google.cloud.genomics.dataflow.utils.GCSOptions; -import 
com.google.cloud.genomics.dataflow.utils.GenomicsOptions; -import com.google.cloud.genomics.gatk.common.GenomicsConverter; - -import htsjdk.samtools.SAMFileHeader; -import htsjdk.samtools.SAMRecord; import htsjdk.samtools.SAMRecordIterator; import htsjdk.samtools.SamReader; -import htsjdk.samtools.SamReaderFactory; import htsjdk.samtools.ValidationStringency; +import java.io.IOException; + import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.File; -import java.io.IOException; - -import static org.junit.Assert.assertEquals; +import com.google.api.services.storage.Storage; +import com.google.cloud.dataflow.sdk.options.PipelineOptionsFactory; +import com.google.cloud.dataflow.sdk.util.Transport; +import com.google.cloud.genomics.dataflow.utils.GCSOptions; @RunWith(JUnit4.class) public class BAMIOITCase { @@ -55,7 +46,6 @@ public void voidEnsureEnvVar() { @Test public void openBAMTest() throws IOException { GCSOptions popts = PipelineOptionsFactory.create().as(GCSOptions.class); - popts.setApiKey(API_KEY); final Storage.Objects storageClient = Transport.newStorageClient(popts).build().objects(); SamReader samReader = BAMIO.openBAM(storageClient, TEST_BAM_FNAME, ValidationStringency.DEFAULT_STRINGENCY); diff --git a/src/test/java/com/google/cloud/genomics/dataflow/utils/GCSHelperITCase.java b/src/test/java/com/google/cloud/genomics/dataflow/utils/GCSHelperITCase.java index 68d2b49..691d89e 100644 --- a/src/test/java/com/google/cloud/genomics/dataflow/utils/GCSHelperITCase.java +++ b/src/test/java/com/google/cloud/genomics/dataflow/utils/GCSHelperITCase.java @@ -16,27 +16,23 @@ package com.google.cloud.genomics.dataflow.utils; -import com.google.cloud.dataflow.sdk.options.PipelineOptionsFactory; -import com.google.cloud.genomics.utils.GenomicsFactory; -import com.google.common.hash.HashingInputStream; - -import org.junit.Assert; -import org.junit.Before; -import 
org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.JUnit4; - -import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; -import java.io.StringReader; import java.security.DigestInputStream; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.Arrays; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; + +import com.google.cloud.dataflow.sdk.options.PipelineOptionsFactory; +import com.google.cloud.genomics.utils.OfflineAuth; + /** * * This test expects you to have a Google Cloud API key in the GOOGLE_API_KEY environment variable. @@ -46,8 +42,6 @@ @RunWith(JUnit4.class) public class GCSHelperITCase { - final String API_KEY = System.getenv("GOOGLE_API_KEY"); - final String[] ARGS = { "--apiKey="+API_KEY }; // this file shouldn't move. final String TEST_BUCKET = "genomics-public-data"; final String TEST_FNAME = "ftp-trace.ncbi.nih.gov/1000genomes/ftp/20131219.populations.tsv"; @@ -56,17 +50,10 @@ public class GCSHelperITCase { // Test the various ways of getting a GCSHelper - // We're not testing testClientSecrets because we can't assume the test machine will have the file. 
- - @Before - public void voidEnsureEnvVar() { - Assert.assertNotNull("You must set the GOOGLE_API_KEY environment variable for this test.", API_KEY); - } - @Test public void testOfflineAuth() throws Exception { - GenomicsOptions options = PipelineOptionsFactory.fromArgs(ARGS).as(GenomicsOptions.class); - GenomicsFactory.OfflineAuth offlineAuth = GenomicsOptions.Methods.getGenomicsAuth(options); + GenomicsOptions options = PipelineOptionsFactory.as(GenomicsOptions.class); + OfflineAuth offlineAuth = GenomicsOptions.Methods.getGenomicsAuth(options); GCSHelper gcsHelper = new GCSHelper(offlineAuth); long fileSize = gcsHelper.getFileSize(TEST_BUCKET, TEST_FNAME); Assert.assertEquals(TEST_FSIZE, fileSize); @@ -74,7 +61,7 @@ public void testOfflineAuth() throws Exception { @Test public void testPipelineOptions() throws Exception { - GenomicsOptions options = PipelineOptionsFactory.fromArgs(ARGS).as(GenomicsOptions.class); + GenomicsOptions options = PipelineOptionsFactory.as(GenomicsOptions.class); GCSHelper gcsHelper = new GCSHelper(options); long fileSize = gcsHelper.getFileSize(TEST_BUCKET, TEST_FNAME); Assert.assertEquals(TEST_FSIZE, fileSize); @@ -84,7 +71,7 @@ public void testPipelineOptions() throws Exception { @Test public void testGetPartial() throws Exception { - GenomicsOptions options = PipelineOptionsFactory.fromArgs(ARGS).as(GenomicsOptions.class); + GenomicsOptions options = PipelineOptionsFactory.as(GenomicsOptions.class); GCSHelper gcsHelper = new GCSHelper(options); String partial = gcsHelper.getPartialObjectData(TEST_BUCKET, TEST_FNAME, 34, 37).toString(); Assert.assertEquals("Code", partial); @@ -92,7 +79,7 @@ public void testGetPartial() throws Exception { @Test public void testGetWhole() throws Exception { - GenomicsOptions options = PipelineOptionsFactory.fromArgs(ARGS).as(GenomicsOptions.class); + GenomicsOptions options = PipelineOptionsFactory.as(GenomicsOptions.class); GCSHelper gcsHelper = new GCSHelper(options); InputStream input = 
gcsHelper.getWholeObject(TEST_BUCKET, TEST_FNAME); byte[] digest = md5sum(input); @@ -101,7 +88,7 @@ public void testGetWhole() throws Exception { @Test public void testDownload() throws Exception { - GenomicsOptions options = PipelineOptionsFactory.fromArgs(ARGS).as(GenomicsOptions.class); + GenomicsOptions options = PipelineOptionsFactory.as(GenomicsOptions.class); GCSHelper gcsHelper = new GCSHelper(options); File tmpFile = gcsHelper.getAsFile(TEST_BUCKET, TEST_FNAME); byte[] digest = md5sum(new FileInputStream(tmpFile));