You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@beam.apache.org by bh...@apache.org on 2022/06/16 20:36:02 UTC

[beam] branch master updated: Update references to jira to GH for the Runners (#21835)

This is an automated email from the ASF dual-hosted git repository.

bhulette pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/beam.git


The following commit(s) were added to refs/heads/master by this push:
     new b774ff5fe9f Update references to jira to GH for the Runners (#21835)
b774ff5fe9f is described below

commit b774ff5fe9f851ad6a8522afdc9942a9961e9cee
Author: Danny McCormick <da...@google.com>
AuthorDate: Thu Jun 16 16:35:56 2022 -0400

    Update references to jira to GH for the Runners (#21835)
    
    * Update references to jira to GH for the Runners
    
    * Spotless format
    
    * Switch to urls
    
    * Spotless apply
    
    * Spotless apply
    
    * Fix awkward formatting
---
 .../beam/runners/core/construction/External.java   |  3 +-
 .../core/construction/NativeTransforms.java        |  2 +-
 .../core/construction/PTransformTranslation.java   |  3 +-
 .../core/construction/ParDoTranslation.java        |  3 +-
 .../runners/core/construction/SplittableParDo.java |  6 ++-
 .../graph/ProjectionProducerVisitor.java           |  2 +-
 .../graph/ProjectionPushdownOptimizer.java         |  2 +-
 .../core/construction/PTransformMatchersTest.java  |  4 +-
 .../graph/ProjectionPushdownOptimizerTest.java     |  4 +-
 .../apache/beam/runners/core/WatermarkHold.java    |  3 +-
 .../beam/runners/core/metrics/HistogramCell.java   |  3 +-
 .../core/metrics/MetricsContainerStepMap.java      |  3 +-
 .../beam/runners/core/SimpleDoFnRunnerTest.java    |  4 +-
 runners/direct-java/build.gradle                   |  2 +-
 .../direct/BoundedReadEvaluatorFactory.java        |  3 +-
 .../apache/beam/runners/direct/DirectRunner.java   |  3 +-
 .../direct/ExecutorServiceParallelExecutor.java    |  3 +-
 .../runners/direct/TransformExecutorServices.java  |  6 ++-
 .../direct/StatefulParDoEvaluatorFactoryTest.java  |  4 +-
 runners/flink/flink_runner.gradle                  |  6 +--
 runners/flink/job-server/flink_job_server.gradle   |  8 ++--
 .../org/apache/beam/runners/flink/FlinkRunner.java |  3 +-
 .../functions/FlinkExecutableStageFunction.java    |  3 +-
 .../streaming/ExecutableStageDoFnOperator.java     |  6 +--
 .../flink/FlinkRequiresStableInputTest.java        |  2 +-
 .../beam/runners/flink/FlinkSavepointTest.java     |  4 +-
 .../FlinkStreamingPipelineTranslatorTest.java      |  3 +-
 .../runners/flink/PortableStateExecutionTest.java  |  3 +-
 .../runners/flink/PortableTimersExecutionTest.java |  3 +-
 .../wrappers/streaming/DoFnOperatorTest.java       |  4 +-
 runners/google-cloud-dataflow-java/build.gradle    | 36 +++++++--------
 .../dataflow/BatchStatefulParDoOverridesTest.java  |  3 +-
 .../beam/runners/dataflow/DataflowRunnerTest.java  |  3 +-
 .../google-cloud-dataflow-java/worker/build.gradle |  4 +-
 .../worker/legacy-worker/build.gradle              |  6 +--
 .../dataflow/worker/BatchModeExecutionContext.java |  6 ++-
 .../dataflow/worker/StreamingDataflowWorker.java   |  3 +-
 .../graph/CreateExecutableStageNodeFunction.java   |  6 ++-
 .../dataflow/worker/IsmSideInputReaderTest.java    |  2 +-
 .../worker/StreamingDataflowWorkerTest.java        |  3 +-
 .../dataflow/worker/UserParDoFnFactoryTest.java    |  3 +-
 .../fnexecution/control/SdkHarnessClient.java      |  5 +-
 .../environment/DockerEnvironmentFactory.java      |  2 +-
 .../provisioning/StaticGrpcProvisionService.java   |  2 +-
 .../control/ProcessBundleDescriptorsTest.java      |  3 +-
 .../fnexecution/control/RemoteExecutionTest.java   |  4 +-
 runners/samza/build.gradle                         | 24 +++++-----
 runners/samza/job-server/build.gradle              | 54 +++++++++++-----------
 .../org/apache/beam/runners/samza/SamzaRunner.java |  4 +-
 .../samza/metrics/SamzaMetricsContainer.java       |  2 +-
 .../samza/translation/ReshuffleTranslator.java     |  2 +-
 .../SamzaPortablePipelineTranslator.java           |  2 +-
 .../samza/translation/SamzaTransformOverrides.java |  3 +-
 .../runtime/SamzaStoreStateInternalsTest.java      |  4 +-
 .../samza/translation/ConfigGeneratorTest.java     |  3 +-
 runners/spark/job-server/spark_job_server.gradle   | 16 +++----
 runners/spark/spark_runner.gradle                  |  2 +-
 .../org/apache/beam/runners/spark/SparkRunner.java |  3 +-
 .../beam/runners/spark/SparkRunnerDebugger.java    |  3 +-
 .../runners/spark/SparkTransformOverrides.java     |  3 +-
 .../SparkStructuredStreamingRunner.java            |  3 +-
 .../translation/SparkTransformOverrides.java       |  3 +-
 .../SparkStreamingPortablePipelineTranslator.java  |  5 +-
 .../streaming/StreamingTransformTranslator.java    |  3 +-
 .../ResumeFromCheckpointStreamingTest.java         |  2 +-
 .../beam/runners/twister2/Twister2Runner.java      |  6 ++-
 66 files changed, 200 insertions(+), 146 deletions(-)

diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/External.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/External.java
index bf7b8d408ee..0a10ab20a2b 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/External.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/External.java
@@ -243,7 +243,8 @@ public class External {
                     PValues.expandInput(PBegin.in(p)),
                     ImmutableMap.of(entry.getKey(), (PCollection<?>) entry.getValue()),
                     Impulse.create(),
-                    // TODO(BEAM-12082): Add proper support for Resource Hints with XLang.
+                    // TODO(https://github.com/apache/beam/issues/18371): Add proper support for
+                    // Resource Hints with XLang.
                     ResourceHints.create(),
                     p);
             // using fake Impulses to provide inputs
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/NativeTransforms.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/NativeTransforms.java
index 9ea47a10835..19127acfca3 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/NativeTransforms.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/NativeTransforms.java
@@ -46,7 +46,7 @@ public class NativeTransforms {
   * Returns true if and only if the Runner understands this transform and can handle it directly.
    */
   public static boolean isNative(RunnerApi.PTransform pTransform) {
-    // TODO(BEAM-10109) Use default (context) classloader.
+    // TODO(https://github.com/apache/beam/issues/20192) Use default (context) classloader.
     Iterator<IsNativeTransform> matchers =
         ServiceLoader.load(IsNativeTransform.class, NativeTransforms.class.getClassLoader())
             .iterator();
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PTransformTranslation.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PTransformTranslation.java
index 9010b7203c8..e701ae60bb5 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PTransformTranslation.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PTransformTranslation.java
@@ -442,7 +442,8 @@ public class PTransformTranslation {
 
         // Required runner implemented transforms should not have an environment id.
         if (!RUNNER_IMPLEMENTED_TRANSFORMS.contains(spec.getUrn())) {
-          // TODO(BEAM-9309): Remove existing hacks around deprecated READ transform.
+          // TODO(https://github.com/apache/beam/issues/20094): Remove existing hacks around
+          // deprecated READ transform.
           if (spec.getUrn().equals(READ_TRANSFORM_URN)) {
             // Only assigning environment to Bounded reads. Not assigning an environment to
             // Unbounded
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/ParDoTranslation.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/ParDoTranslation.java
index 20472fa1eff..78d54426da3 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/ParDoTranslation.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/ParDoTranslation.java
@@ -599,7 +599,8 @@ public class ParDoTranslation {
                 .setOrderedListSpec(
                     RunnerApi.OrderedListStateSpec.newBuilder()
                         .setElementCoderId(registerCoderOrThrow(components, elementCoder)))
-                // TODO(BEAM-10650): Update with correct protocol once the protocol is defined and
+                // TODO(https://github.com/apache/beam/issues/20486): Update with correct protocol
+                // once the protocol is defined and
                 // the SDK harness uses it.
                 .build();
           }
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/SplittableParDo.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/SplittableParDo.java
index 1cdee8d1cd8..a29421e6906 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/SplittableParDo.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/SplittableParDo.java
@@ -691,7 +691,8 @@ public class SplittableParDo<InputT, OutputT, RestrictionT, WatermarkEstimatorSt
    * PrimitiveBoundedRead} and {@link PrimitiveUnboundedRead} if either the experiment {@code
    * use_deprecated_read} or {@code beam_fn_api_use_deprecated_read} are specified.
    *
-   * <p>TODO(BEAM-10670): Remove the primitive Read and make the splittable DoFn the only option.
+   * <p>TODO(https://github.com/apache/beam/issues/20530): Remove the primitive Read and make the
+   * splittable DoFn the only option.
    */
   public static void convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(Pipeline pipeline) {
     if (!ExperimentalOptions.hasExperiment(pipeline.getOptions(), "use_sdf_read")
@@ -706,7 +707,8 @@ public class SplittableParDo<InputT, OutputT, RestrictionT, WatermarkEstimatorSt
    * Converts {@link Read} based Splittable DoFn expansions to primitive reads implemented by {@link
    * PrimitiveBoundedRead} and {@link PrimitiveUnboundedRead}.
    *
-   * <p>TODO(BEAM-10670): Remove the primitive Read and make the splittable DoFn the only option.
+   * <p>TODO(https://github.com/apache/beam/issues/20530): Remove the primitive Read and make the
+   * splittable DoFn the only option.
    */
   public static void convertReadBasedSplittableDoFnsToPrimitiveReads(Pipeline pipeline) {
     pipeline.replaceAll(
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/graph/ProjectionProducerVisitor.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/graph/ProjectionProducerVisitor.java
index da3f66ef43f..4e0fa3fb734 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/graph/ProjectionProducerVisitor.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/graph/ProjectionProducerVisitor.java
@@ -57,7 +57,7 @@ class ProjectionProducerVisitor extends PipelineVisitor.Defaults {
   public CompositeBehavior enterCompositeTransform(Node node) {
     PTransform<?, ?> transform = node.getTransform();
 
-    // TODO(BEAM-13658) Support inputs other than PBegin.
+    // TODO(https://github.com/apache/beam/issues/21359) Support inputs other than PBegin.
     if (!node.getInputs().isEmpty()) {
       return CompositeBehavior.DO_NOT_ENTER_TRANSFORM;
     }
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/graph/ProjectionPushdownOptimizer.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/graph/ProjectionPushdownOptimizer.java
index b7d0470b6dc..a3c6bb20cc2 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/graph/ProjectionPushdownOptimizer.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/graph/ProjectionPushdownOptimizer.java
@@ -95,7 +95,7 @@ public class ProjectionPushdownOptimizer {
     }
   }
 
-  // TODO(BEAM-13658) Support inputs other than PBegin.
+  // TODO(https://github.com/apache/beam/issues/21359) Support inputs other than PBegin.
   private static class PushdownOverrideFactory<
           OutputT extends POutput, TransformT extends PTransform<PBegin, OutputT>>
       implements PTransformOverrideFactory<PBegin, OutputT, TransformT> {
diff --git a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/PTransformMatchersTest.java b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/PTransformMatchersTest.java
index 094c9c8c68b..2acc8c81db0 100644
--- a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/PTransformMatchersTest.java
+++ b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/PTransformMatchersTest.java
@@ -83,7 +83,9 @@ import org.junit.runners.JUnit4;
 @RunWith(JUnit4.class)
 @SuppressWarnings({
   "rawtypes", // TODO(https://github.com/apache/beam/issues/20447)
-  "unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+  // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of
+  // errorprone is released (2.11.0)
+  "unused"
 })
 public class PTransformMatchersTest implements Serializable {
   @Rule
diff --git a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/graph/ProjectionPushdownOptimizerTest.java b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/graph/ProjectionPushdownOptimizerTest.java
index 03d49221e8c..81843d68256 100644
--- a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/graph/ProjectionPushdownOptimizerTest.java
+++ b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/graph/ProjectionPushdownOptimizerTest.java
@@ -104,8 +104,8 @@ public class ProjectionPushdownOptimizerTest {
         FieldAccessDescriptor.withFieldNames("foo", "bar");
     p.apply(source).apply(originalT).apply(new FieldAccessTransform(downstreamFieldAccess));
 
-    // TODO(BEAM-13658) Support pushdown on intermediate transforms.
-    // For now, test that the pushdown optimizer ignores immediate transforms.
+    // TODO(https://github.com/apache/beam/issues/21359) Support pushdown on intermediate
+    // transforms. For now, test that the pushdown optimizer ignores intermediate transforms.
     ProjectionPushdownOptimizer.optimize(p);
     Assert.assertTrue(pipelineHasTransform(p, originalT));
   }
diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/WatermarkHold.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/WatermarkHold.java
index baef5b5756c..60a30039914 100644
--- a/runners/core-java/src/main/java/org/apache/beam/runners/core/WatermarkHold.java
+++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/WatermarkHold.java
@@ -73,7 +73,8 @@ class WatermarkHold<W extends BoundedWindow> implements Serializable {
       StateTags.makeSystemTagInternal(
           StateTags.watermarkStateInternal("extra", TimestampCombiner.EARLIEST));
 
-  // [BEAM-420] Seems likely these should all be transient or this class should not be Serializable
+  // [https://github.com/apache/beam/issues/18014] Seems likely these should all be transient or
+  // this class should not be Serializable
   @SuppressFBWarnings("SE_BAD_FIELD")
   private final TimerInternals timerInternals;
 
diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/HistogramCell.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/HistogramCell.java
index 4f8894e9366..2a594401754 100644
--- a/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/HistogramCell.java
+++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/HistogramCell.java
@@ -70,7 +70,8 @@ public class HistogramCell
     dirty.afterModification();
   }
 
-  // TODO(BEAM-12103): Update this function to allow incrementing the infinite buckets as well.
+  // TODO(https://github.com/apache/beam/issues/20853): Update this function to allow incrementing
+  // the infinite buckets as well.
   // and remove the incTopBucketCount and incBotBucketCount methods.
   // Using 0 and length -1 as the bucketIndex.
   public void incBucketCount(int bucketIndex, long count) {
diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/MetricsContainerStepMap.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/MetricsContainerStepMap.java
index f3ec4d498fd..02296b5b0da 100644
--- a/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/MetricsContainerStepMap.java
+++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/MetricsContainerStepMap.java
@@ -63,7 +63,8 @@ public class MetricsContainerStepMap implements Serializable {
   /** Returns the container for the given step name. */
   public MetricsContainerImpl getContainer(String stepName) {
     if (stepName == null) {
-      // TODO(BEAM-6538): Disallow this in the future, some tests rely on an empty step name today.
+      // TODO(https://github.com/apache/beam/issues/19275): Disallow this in the future, some tests
+      // rely on an empty step name today.
       return getUnboundContainer();
     }
     return metricsContainers.computeIfAbsent(
diff --git a/runners/core-java/src/test/java/org/apache/beam/runners/core/SimpleDoFnRunnerTest.java b/runners/core-java/src/test/java/org/apache/beam/runners/core/SimpleDoFnRunnerTest.java
index 5d065f417fb..791f2d17dd1 100644
--- a/runners/core-java/src/test/java/org/apache/beam/runners/core/SimpleDoFnRunnerTest.java
+++ b/runners/core-java/src/test/java/org/apache/beam/runners/core/SimpleDoFnRunnerTest.java
@@ -68,7 +68,9 @@ import org.mockito.MockitoAnnotations;
 @RunWith(JUnit4.class)
 @SuppressWarnings({
   "rawtypes", // TODO(https://github.com/apache/beam/issues/20447)
-  "unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+  // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of
+  // errorprone is released (2.11.0)
+  "unused"
 })
 public class SimpleDoFnRunnerTest {
   @Rule public ExpectedException thrown = ExpectedException.none();
diff --git a/runners/direct-java/build.gradle b/runners/direct-java/build.gradle
index cd23ec86fbc..86e30d4b9c2 100644
--- a/runners/direct-java/build.gradle
+++ b/runners/direct-java/build.gradle
@@ -249,7 +249,7 @@ task examplesIntegrationTest(type: Test) {
   testClassesDirs = files(project(":examples:java").sourceSets.test.output.classesDirs)
   useJUnit {
     filter{
-      // TODO (BEAM-14019) Fix integration Tests to run with DirectRunner: Timeout error
+      // TODO (https://github.com/apache/beam/issues/21344) Fix integration Tests to run with DirectRunner: Timeout error
       excludeTestsMatching 'org.apache.beam.examples.complete.TfIdfIT'
       excludeTestsMatching 'org.apache.beam.examples.WindowedWordCountIT.testWindowedWordCountInBatchDynamicSharding'
     }
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/BoundedReadEvaluatorFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/BoundedReadEvaluatorFactory.java
index 682e8193ec9..8f8d73d237b 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/BoundedReadEvaluatorFactory.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/BoundedReadEvaluatorFactory.java
@@ -65,7 +65,8 @@ final class BoundedReadEvaluatorFactory implements TransformEvaluatorFactory {
   private final EvaluationContext evaluationContext;
   private final PipelineOptions options;
 
-  // TODO: (BEAM-723) Create a shared ExecutorService for maintenance tasks in the DirectRunner.
+  // TODO: (https://github.com/apache/beam/issues/18079) Create a shared ExecutorService for
+  // maintenance tasks in the DirectRunner.
   @VisibleForTesting
   final ExecutorService executor =
       Executors.newCachedThreadPool(
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectRunner.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectRunner.java
index 3afe7b99113..34ce76ed7c9 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectRunner.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectRunner.java
@@ -253,7 +253,8 @@ public class DirectRunner extends PipelineRunner<DirectPipelineResult> {
     // The last set of overrides includes GBK overrides used in WriteView
     pipeline.replaceAll(groupByKeyOverrides());
 
-    // TODO(BEAM-10670): Use SDF read as default when we address performance issue.
+    // TODO(https://github.com/apache/beam/issues/20530): Use SDF read as default when we address
+    // performance issue.
     SplittableParDo.convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(pipeline);
   }
 
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ExecutorServiceParallelExecutor.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ExecutorServiceParallelExecutor.java
index 0840b1c51e4..3441c037966 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ExecutorServiceParallelExecutor.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ExecutorServiceParallelExecutor.java
@@ -147,7 +147,8 @@ final class ExecutorServiceParallelExecutor
   }
 
   @Override
-  // TODO: [BEAM-4563] Pass Future back to consumer to check for async errors
+  // TODO: [https://github.com/apache/beam/issues/18968] Pass Future back to consumer to check for
+  // async errors
   @SuppressWarnings("FutureReturnValueIgnored")
   public void start(DirectGraph graph, RootProviderRegistry rootProviderRegistry) {
     int numTargetSplits = Math.max(3, targetParallelism);
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformExecutorServices.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformExecutorServices.java
index abf54df5d59..7ec618f451e 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformExecutorServices.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformExecutorServices.java
@@ -70,7 +70,8 @@ final class TransformExecutorServices {
     }
 
     @Override
-    // TODO: [BEAM-4563] Pass Future back to consumer to check for async errors
+    // TODO: [https://github.com/apache/beam/issues/18968] Pass Future back to consumer to check for
+    // async errors
     @SuppressWarnings("FutureReturnValueIgnored")
     public void schedule(TransformExecutor work) {
       if (active.get()) {
@@ -154,7 +155,8 @@ final class TransformExecutorServices {
       workQueue.clear();
     }
 
-    // TODO: [BEAM-4563] Pass Future back to consumer to check for async errors
+    // TODO: [https://github.com/apache/beam/issues/18968] Pass Future back to consumer to check for
+    // async errors
     @SuppressWarnings("FutureReturnValueIgnored")
     private void updateCurrentlyEvaluating() {
       if (currentlyEvaluating.get() == null) {
diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/StatefulParDoEvaluatorFactoryTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/StatefulParDoEvaluatorFactoryTest.java
index 0d24db06c62..41a602495ba 100644
--- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/StatefulParDoEvaluatorFactoryTest.java
+++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/StatefulParDoEvaluatorFactoryTest.java
@@ -88,7 +88,9 @@ import org.mockito.MockitoAnnotations;
 @RunWith(JUnit4.class)
 @SuppressWarnings({
   "rawtypes", // TODO(https://github.com/apache/beam/issues/20447)
-  "unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+  // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of
+  // errorprone is released (2.11.0)
+  "unused"
 })
 public class StatefulParDoEvaluatorFactoryTest implements Serializable {
   @Mock private transient EvaluationContext mockEvaluationContext;
diff --git a/runners/flink/flink_runner.gradle b/runners/flink/flink_runner.gradle
index 7d44c44d416..fe164f0f148 100644
--- a/runners/flink/flink_runner.gradle
+++ b/runners/flink/flink_runner.gradle
@@ -225,7 +225,7 @@ class ValidatesRunnerConfig {
 }
 
 def sickbayTests = [
-        // TODO(BEAM-13573)
+        // TODO(https://github.com/apache/beam/issues/21306)
         'org.apache.beam.sdk.transforms.ParDoTest$TimestampTests.testOnWindowTimestampSkew',
 ]
 
@@ -358,9 +358,9 @@ tasks.register("examplesIntegrationTest", Test) {
   testClassesDirs = files(project(":examples:java").sourceSets.test.output.classesDirs)
   useJUnit {
     filter{
-      // TODO (BEAM-14019) Fix integration Tests to run with FlinkRunner: Assertion error
+      // TODO (https://github.com/apache/beam/issues/21344) Fix integration Tests to run with FlinkRunner: Assertion error
       excludeTestsMatching 'org.apache.beam.examples.WindowedWordCountIT.testWindowedWordCountInBatchDynamicSharding'
-      // TODO (BEAM-14019) Fix integration Tests to run with FlinkRunner: Error deleting table, Not found: Dataset
+      // TODO (https://github.com/apache/beam/issues/21344) Fix integration Tests to run with FlinkRunner: Error deleting table, Not found: Dataset
       excludeTestsMatching 'org.apache.beam.examples.cookbook.BigQueryTornadoesIT.testE2eBigQueryTornadoesWithStorageApiUsingQuery'
     }
   }
diff --git a/runners/flink/job-server/flink_job_server.gradle b/runners/flink/job-server/flink_job_server.gradle
index 58fc7682f60..b77e086a532 100644
--- a/runners/flink/job-server/flink_job_server.gradle
+++ b/runners/flink/job-server/flink_job_server.gradle
@@ -193,17 +193,17 @@ def portableValidatesRunnerTask(String name, boolean streaming, boolean checkpoi
         excludeCategories 'org.apache.beam.sdk.testing.UsesPerKeyOrderInBundle'
     },
     testFilter: {
-      // TODO(BEAM-10016)
+      // TODO(https://github.com/apache/beam/issues/20269)
       excludeTestsMatching 'org.apache.beam.sdk.transforms.FlattenTest.testFlattenWithDifferentInputAndOutputCoders2'
-      // TODO(BEAM-12039)
+      // TODO(https://github.com/apache/beam/issues/20843)
       excludeTestsMatching 'org.apache.beam.sdk.testing.TestStreamTest.testDiscardingMode'
-      // TODO(BEAM-12038)
+      // TODO(https://github.com/apache/beam/issues/20844)
       excludeTestsMatching 'org.apache.beam.sdk.testing.TestStreamTest.testLateDataAccumulating'
       // TODO(BEAM-12710)
       excludeTestsMatching 'org.apache.beam.sdk.testing.TestStreamTest.testFirstElementLate'
       // TODO(BEAM-13498)
       excludeTestsMatching 'org.apache.beam.sdk.transforms.ParDoTest$TimestampTests.testProcessElementSkew'
-      // TODO(BEAM-13952)
+      // TODO(https://github.com/apache/beam/issues/21472)
       excludeTestsMatching 'org.apache.beam.sdk.transforms.GroupByKeyTest$BasicTests.testAfterProcessingTimeContinuationTriggerUsingState'
     },
   )
diff --git a/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkRunner.java b/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkRunner.java
index 25668a6f99b..02bd2b3b93d 100644
--- a/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkRunner.java
+++ b/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkRunner.java
@@ -77,7 +77,8 @@ public class FlinkRunner extends PipelineRunner<PipelineResult> {
   @Override
   public PipelineResult run(Pipeline pipeline) {
     // Portable flink only support SDF as read.
-    // TODO(BEAM-10670): Use SDF read as default when we address performance issue.
+    // TODO(https://github.com/apache/beam/issues/20530): Use SDF read as default when we address
+    // performance issue.
     if (!ExperimentalOptions.hasExperiment(pipeline.getOptions(), "beam_fn_api")) {
       SplittableParDo.convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(pipeline);
     }
diff --git a/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkExecutableStageFunction.java b/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkExecutableStageFunction.java
index 87e092e25ce..afc5ae106d9 100644
--- a/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkExecutableStageFunction.java
+++ b/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkExecutableStageFunction.java
@@ -173,7 +173,8 @@ public class FlinkExecutableStageFunction<InputT> extends AbstractRichFunction
             metricContainer.updateMetrics(stepName, response.getMonitoringInfosList());
           }
         };
-    // TODO(BEAM-11021): Support bundle finalization in portable batch.
+    // TODO(https://github.com/apache/beam/issues/19526): Support bundle finalization in portable
+    // batch.
     finalizationHandler =
         bundleId -> {
           throw new UnsupportedOperationException(
diff --git a/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/ExecutableStageDoFnOperator.java b/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/ExecutableStageDoFnOperator.java
index ee973f9621e..485d8b3c055 100644
--- a/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/ExecutableStageDoFnOperator.java
+++ b/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/ExecutableStageDoFnOperator.java
@@ -708,9 +708,9 @@ public class ExecutableStageDoFnOperator<InputT, OutputT> extends DoFnOperator<I
         // Manually drain processing time timers since Flink will ignore pending
         // processing-time timers when upstream operators have shut down and will also
         // shut down this operator with pending processing-time timers.
-        // TODO(BEAM-11210, FLINK-18647): It doesn't work efficiently when the watermark of upstream
-        // advances
-        // to MAX_TIMESTAMP immediately.
+        // TODO(https://github.com/apache/beam/issues/20600, FLINK-18647): It doesn't work
+        // efficiently when the watermark of upstream advances to MAX_TIMESTAMP
+        // immediately.
         if (numProcessingTimeTimers() > 0) {
           timerInternals.processPendingProcessingTimeTimers();
         }
diff --git a/runners/flink/src/test/java/org/apache/beam/runners/flink/FlinkRequiresStableInputTest.java b/runners/flink/src/test/java/org/apache/beam/runners/flink/FlinkRequiresStableInputTest.java
index b8f384502e1..5a71819f9aa 100644
--- a/runners/flink/src/test/java/org/apache/beam/runners/flink/FlinkRequiresStableInputTest.java
+++ b/runners/flink/src/test/java/org/apache/beam/runners/flink/FlinkRequiresStableInputTest.java
@@ -121,7 +121,7 @@ public class FlinkRequiresStableInputTest {
   * restore the savepoint to check if we produce idempotent results.
    */
   @Test(timeout = 30_000)
-  @Ignore("BEAM-13575")
+  @Ignore("https://github.com/apache/beam/issues/21333")
   public void testParDoRequiresStableInput() throws Exception {
     FlinkPipelineOptions options = FlinkPipelineOptions.defaults();
     options.setParallelism(1);
diff --git a/runners/flink/src/test/java/org/apache/beam/runners/flink/FlinkSavepointTest.java b/runners/flink/src/test/java/org/apache/beam/runners/flink/FlinkSavepointTest.java
index 410fa8e9d26..32fd7f8a106 100644
--- a/runners/flink/src/test/java/org/apache/beam/runners/flink/FlinkSavepointTest.java
+++ b/runners/flink/src/test/java/org/apache/beam/runners/flink/FlinkSavepointTest.java
@@ -84,7 +84,9 @@ import org.slf4j.LoggerFactory;
  */
 @SuppressWarnings({
   "rawtypes", // TODO(https://github.com/apache/beam/issues/20447)
-  "unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+  // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of
+  // errorprone is released (2.11.0)
+  "unused"
 })
 public class FlinkSavepointTest implements Serializable {
 
diff --git a/runners/flink/src/test/java/org/apache/beam/runners/flink/FlinkStreamingPipelineTranslatorTest.java b/runners/flink/src/test/java/org/apache/beam/runners/flink/FlinkStreamingPipelineTranslatorTest.java
index b340b6e06c7..849f8be952c 100644
--- a/runners/flink/src/test/java/org/apache/beam/runners/flink/FlinkStreamingPipelineTranslatorTest.java
+++ b/runners/flink/src/test/java/org/apache/beam/runners/flink/FlinkStreamingPipelineTranslatorTest.java
@@ -55,7 +55,8 @@ import org.junit.Assert;
 import org.junit.Test;
 
 /** Tests if overrides are properly applied. */
-// TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of errorprone is
+// released (2.11.0)
 @SuppressWarnings("unused")
 public class FlinkStreamingPipelineTranslatorTest {
 
diff --git a/runners/flink/src/test/java/org/apache/beam/runners/flink/PortableStateExecutionTest.java b/runners/flink/src/test/java/org/apache/beam/runners/flink/PortableStateExecutionTest.java
index d87de4e5451..84688938553 100644
--- a/runners/flink/src/test/java/org/apache/beam/runners/flink/PortableStateExecutionTest.java
+++ b/runners/flink/src/test/java/org/apache/beam/runners/flink/PortableStateExecutionTest.java
@@ -58,7 +58,8 @@ import org.slf4j.LoggerFactory;
  * org.apache.beam.runners.flink.translation.wrappers.streaming.ExecutableStageDoFnOperator}.
  */
 @RunWith(Parameterized.class)
-// TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of errorprone is
+// released (2.11.0)
 @SuppressWarnings("unused")
 public class PortableStateExecutionTest implements Serializable {
 
diff --git a/runners/flink/src/test/java/org/apache/beam/runners/flink/PortableTimersExecutionTest.java b/runners/flink/src/test/java/org/apache/beam/runners/flink/PortableTimersExecutionTest.java
index f84763b79d3..828e23a96ee 100644
--- a/runners/flink/src/test/java/org/apache/beam/runners/flink/PortableTimersExecutionTest.java
+++ b/runners/flink/src/test/java/org/apache/beam/runners/flink/PortableTimersExecutionTest.java
@@ -71,7 +71,8 @@ import org.slf4j.LoggerFactory;
  * of a given timer is run.
  */
 @RunWith(Parameterized.class)
-// TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of errorprone is
+// released (2.11.0)
 @SuppressWarnings("unused")
 public class PortableTimersExecutionTest implements Serializable {
 
diff --git a/runners/flink/src/test/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperatorTest.java b/runners/flink/src/test/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperatorTest.java
index fad521e9278..5ddef5935b2 100644
--- a/runners/flink/src/test/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperatorTest.java
+++ b/runners/flink/src/test/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperatorTest.java
@@ -117,7 +117,9 @@ import org.powermock.reflect.Whitebox;
 @RunWith(JUnit4.class)
 @SuppressWarnings({
   "keyfor",
-  "unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+  // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of
+  // errorprone is released (2.11.0)
+  "unused"
 })
 public class DoFnOperatorTest {
 
diff --git a/runners/google-cloud-dataflow-java/build.gradle b/runners/google-cloud-dataflow-java/build.gradle
index b2a2be9cdfc..84925cbd2bf 100644
--- a/runners/google-cloud-dataflow-java/build.gradle
+++ b/runners/google-cloud-dataflow-java/build.gradle
@@ -23,7 +23,7 @@ applyJavaNature(
   automaticModuleName: 'org.apache.beam.runners.dataflow',
   classesTriggerCheckerBugs: [
     'PrimitiveParDoSingleFactory': 'https://github.com/typetools/checker-framework/issues/3791',
-    // TODO(BEAM-12687): This currently crashes with checkerframework 3.10.0
+    // TODO(https://github.com/apache/beam/issues/21068): This currently crashes with checkerframework 3.10.0
     // when compiling :runners:google-cloud-dataflow-java:compileJava with:
     // message: class file for com.google.api.services.bigquery.model.TableRow not found
     // ; The Checker Framework crashed.  Please report the crash.
@@ -31,7 +31,7 @@ applyJavaNature(
     // Last visited tree at line 57 column 1:
     // @AutoService(CoderCloudObjectTranslatorRegistrar.class)
     // Exception: com.sun.tools.javac.code.Symbol$CompletionFailure: class file for com.google.api.services.bigquery.model.TableRow not found; com.sun.tools.javac.code.Symbol$CompletionFailure: class file for com.google.api.services.bigquery.model.TableRow not found
-    'DefaultCoderCloudObjectTranslatorRegistrar': 'TODO(BEAM-12687): Report the crash if still occurring on newest version',
+    'DefaultCoderCloudObjectTranslatorRegistrar': 'TODO(https://github.com/apache/beam/issues/21068): Report the crash if still occurring on newest version',
   ],
 )
 
@@ -161,7 +161,7 @@ def runnerV2PipelineOptions = [
   "--region=${dataflowRegion}",
   "--tempRoot=${dataflowValidatesTempRoot}",
   "--sdkContainerImage=${dockerJavaImageContainer}:${dockerTag}",
-  // TODO(BEAM-11779) remove shuffle_mode=appliance with runner v2 once issue is resolved.
+  // TODO(https://github.com/apache/beam/issues/20806) remove shuffle_mode=appliance with runner v2 once issue is resolved.
   "--experiments=use_unified_worker,use_runner_v2,shuffle_mode=appliance",
 ]
 
@@ -371,7 +371,7 @@ task validatesRunner {
   dependsOn(createLegacyWorkerValidatesRunnerTest(
     name: 'validatesRunnerLegacyWorkerTest',
     excludedTests: [
-      // TODO(BEAM-13952)
+      // TODO(https://github.com/apache/beam/issues/21472)
       'org.apache.beam.sdk.transforms.GroupByKeyTest$BasicTests.testAfterProcessingTimeContinuationTriggerUsingState',
     ]
   ))
@@ -390,7 +390,7 @@ task validatesRunnerStreaming {
       'org.apache.beam.sdk.testing.UsesSetState',
     ],
     excludedTests: [
-      // TODO(BEAM-13952)
+      // TODO(https://github.com/apache/beam/issues/21472)
       'org.apache.beam.sdk.transforms.GroupByKeyTest$BasicTests.testAfterProcessingTimeContinuationTriggerUsingState'
     ]
   ))
@@ -418,7 +418,7 @@ createCrossLanguageValidatesRunnerTask(
     "--project=${dataflowProject}",
     "--region=${dataflowRegion}",
     "--sdk_harness_container_image_overrides=.*java.*,${dockerJavaImageContainer}:${dockerTag}",
-    // TODO(BEAM-11779) remove shuffle_mode=appliance with runner v2 once issue is resolved
+    // TODO(https://github.com/apache/beam/issues/20806) remove shuffle_mode=appliance with runner v2 once issue is resolved
     "--experiments=shuffle_mode=appliance",
   ],
   javaPipelineOptions: [
@@ -428,7 +428,7 @@ createCrossLanguageValidatesRunnerTask(
     "--tempRoot=${dataflowValidatesTempRoot}",
     "--sdkContainerImage=${dockerJavaImageContainer}:${dockerTag}",
     "--sdkHarnessContainerImageOverrides=.*python.*,${dockerPythonImageContainer}:${dockerTag}",
-    // TODO(BEAM-11779) remove shuffle_mode=appliance with runner v2 once issue is resolved.
+    // TODO(https://github.com/apache/beam/issues/20806) remove shuffle_mode=appliance with runner v2 once issue is resolved.
     "--experiments=shuffle_mode=appliance",
   ],
   pytestOptions: [
@@ -481,12 +481,12 @@ task validatesRunnerV2 {
       'org.apache.beam.sdk.transforms.GroupByKeyTest$BasicTests.testCombiningAccumulatingProcessingTime',
       'org.apache.beam.sdk.transforms.GroupByKeyTest$BasicTests.testLargeKeys100MB',
       'org.apache.beam.sdk.transforms.GroupByKeyTest$BasicTests.testLargeKeys10MB',
-      // TODO(BEAM-12353): Identify whether it's bug or a feature gap.
+      // TODO(https://github.com/apache/beam/issues/20931): Identify whether it's bug or a feature gap.
       'org.apache.beam.sdk.transforms.GroupByKeyTest$WindowTests.testRewindowWithTimestampCombiner',
 
       'org.apache.beam.sdk.transforms.FlattenTest.testFlattenWithDifferentInputAndOutputCoders2',
 
-      // TODO(BEAM-13952)
+      // TODO(https://github.com/apache/beam/issues/21472)
       'org.apache.beam.sdk.transforms.GroupByKeyTest$BasicTests.testAfterProcessingTimeContinuationTriggerUsingState',
     ]
   ))
@@ -502,7 +502,7 @@ task validatesRunnerV2Streaming {
       'org.apache.beam.sdk.testing.LargeKeys$Above10KB',
       'org.apache.beam.sdk.testing.UsesBoundedSplittableParDo',
       'org.apache.beam.sdk.testing.UsesCommittedMetrics',
-      'org.apache.beam.sdk.testing.UsesStrictTimerOrdering' /* BEAM-8543 */,
+      'org.apache.beam.sdk.testing.UsesStrictTimerOrdering' /* https://github.com/apache/beam/issues/19957 */,
       'org.apache.beam.sdk.testing.UsesTestStream',
       'org.apache.beam.sdk.testing.UsesOnWindowExpiration',
     ],
@@ -518,7 +518,7 @@ task validatesRunnerV2Streaming {
       'org.apache.beam.sdk.extensions.sql.BeamSqlDslAggregationTest.testTriggeredTumble',
       'org.apache.beam.sdk.transforms.ReshuffleTest.testReshuffleWithTimestampsStreaming',
 
-      // TODO(BEAM-11858) reading a side input twice fails
+      // TODO(https://github.com/apache/beam/issues/20726) reading a side input twice fails
       'org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests.testSameSideInputReadTwice',
       'org.apache.beam.sdk.transforms.CombineFnsTest.testComposedCombineWithContext',
       'org.apache.beam.sdk.transforms.CombineTest$CombineWithContextTests.testSimpleCombineWithContextEmpty',
@@ -531,7 +531,7 @@ task validatesRunnerV2Streaming {
 
       'org.apache.beam.sdk.transforms.GroupByKeyTest.testCombiningAccumulatingProcessingTime',
 
-      // TODO(BEAM-11306): Pipeline is hanging for these 3 tests.
+      // TODO(https://github.com/apache/beam/issues/20601): Pipeline is hanging for these 3 tests.
       'org.apache.beam.sdk.transforms.SplittableDoFnTest.testPairWithIndexBasicUnbounded',
       'org.apache.beam.sdk.transforms.SplittableDoFnTest.testOutputAfterCheckpointUnbounded',
       'org.apache.beam.sdk.transforms.SplittableDoFnTest.testBundleFinalizationOccursOnUnboundedSplittableDoFn',
@@ -550,7 +550,7 @@ task validatesRunnerV2Streaming {
       'org.apache.beam.sdk.transforms.WaitTest.testWaitBoundedInDefaultWindow',
       'org.apache.beam.sdk.transforms.WaitTest.testWaitWithSomeSignalWindowsEmpty',
 
-      // TODO(BEAM-3245): respect ParDo lifecycle.
+      // TODO(https://github.com/apache/beam/issues/18592): respect ParDo lifecycle.
       'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInFinishBundle',
       'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInFinishBundleStateful',
       'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInProcessElement',
@@ -560,20 +560,20 @@ task validatesRunnerV2Streaming {
       'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInStartBundle',
       'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInStartBundleStateful',
 
-      // TODO(BEAM-11917) Empty flatten fails in streaming
+      // TODO(https://github.com/apache/beam/issues/20734) Empty flatten fails in streaming
       "org.apache.beam.sdk.transforms.FlattenTest.testEmptyFlattenAsSideInput",
       "org.apache.beam.sdk.transforms.FlattenTest.testFlattenPCollectionsEmptyThenParDo",
       "org.apache.beam.sdk.transforms.FlattenTest.testFlattenPCollectionsEmpty",
 
       'org.apache.beam.sdk.io.CountingSourceTest.testBoundedSourceSplits',
 
-      // TODO(BEAM-12353): Identify whether it's bug or a feature gap.
+      // TODO(https://github.com/apache/beam/issues/20931): Identify whether it's bug or a feature gap.
       'org.apache.beam.sdk.transforms.GroupByKeyTest$WindowTests.testRewindowWithTimestampCombiner',
 
-      // TODO(BEAM-13525)
+      // TODO(https://github.com/apache/beam/issues/21424)
       'org.apache.beam.sdk.transforms.ParDoTest$TimestampTests.testProcessElementSkew',
       'org.apache.beam.sdk.transforms.ParDoTest$TimestampTests.testOnWindowTimestampSkew',
-      // TODO(BEAM-13952)
+      // TODO(https://github.com/apache/beam/issues/21472)
       'org.apache.beam.sdk.transforms.GroupByKeyTest$BasicTests.testAfterProcessingTimeContinuationTriggerUsingState',
     ]
   ))
@@ -697,7 +697,7 @@ task examplesJavaRunnerV2IntegrationTest(type: Test) {
   exclude '**/WindowedWordCountIT.class'
   exclude '**/TopWikipediaSessionsIT.class'
   exclude '**/AutoCompleteIT.class'
-  // TODO(BEAM-11201): test times out.
+  // TODO(https://github.com/apache/beam/issues/20593): test times out.
   exclude '**/FhirIOReadIT.class'
 
   maxParallelForks 4
diff --git a/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/BatchStatefulParDoOverridesTest.java b/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/BatchStatefulParDoOverridesTest.java
index 5950d7b4fe0..a77c540a48c 100644
--- a/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/BatchStatefulParDoOverridesTest.java
+++ b/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/BatchStatefulParDoOverridesTest.java
@@ -58,7 +58,8 @@ import org.junit.runners.JUnit4;
 
 /** Tests for {@link BatchStatefulParDoOverrides}. */
 @RunWith(JUnit4.class)
-// TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of errorprone is
+// released (2.11.0)
 @SuppressWarnings("unused")
 public class BatchStatefulParDoOverridesTest implements Serializable {
 
diff --git a/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/DataflowRunnerTest.java b/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/DataflowRunnerTest.java
index afd75bb843c..9bd6a5d9719 100644
--- a/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/DataflowRunnerTest.java
+++ b/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/DataflowRunnerTest.java
@@ -186,7 +186,8 @@ import org.powermock.modules.junit4.PowerMockRunner;
  * <p>Implements {@link Serializable} because it is caught in closures.
  */
 @RunWith(JUnit4.class)
-// TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of errorprone is
+// released (2.11.0)
 @SuppressWarnings("unused")
 public class DataflowRunnerTest implements Serializable {
 
diff --git a/runners/google-cloud-dataflow-java/worker/build.gradle b/runners/google-cloud-dataflow-java/worker/build.gradle
index f879eefb2bf..94dd383155b 100644
--- a/runners/google-cloud-dataflow-java/worker/build.gradle
+++ b/runners/google-cloud-dataflow-java/worker/build.gradle
@@ -124,8 +124,8 @@ dependencies {
   shadowTest library.java.mockito_core
 }
 
-//TODO(BEAM-5657): checktyle task should be enabled in the future.
+//TODO(https://github.com/apache/beam/issues/19115): checkstyle task should be enabled in the future.
 checkstyleMain.enabled = false
 checkstyleTest.enabled = false
-//TODO(BEAM-5659): javadoc task should be enabled in the future.
+//TODO(https://github.com/apache/beam/issues/19119): javadoc task should be enabled in the future.
 javadoc.enabled = false
diff --git a/runners/google-cloud-dataflow-java/worker/legacy-worker/build.gradle b/runners/google-cloud-dataflow-java/worker/legacy-worker/build.gradle
index 34d3542383a..82ec50b2eb9 100644
--- a/runners/google-cloud-dataflow-java/worker/legacy-worker/build.gradle
+++ b/runners/google-cloud-dataflow-java/worker/legacy-worker/build.gradle
@@ -90,7 +90,7 @@ applyJavaNature(
         shadowJarValidationExcludes: [
             "org/apache/beam/runners/dataflow/worker/**",
             "org/apache/beam/repackaged/beam_runners_google_cloud_dataflow_java_legacy_worker/**",
-            // TODO(BEAM-6137): Move DataflowRunnerHarness class under org.apache.beam.runners.dataflow.worker namespace
+            // TODO(https://github.com/apache/beam/issues/19114): Move DataflowRunnerHarness class under org.apache.beam.runners.dataflow.worker namespace
             "com/google/cloud/dataflow/worker/DataflowRunnerHarness.class",
             // Allow slf4j implementation worker for logging during pipeline execution
             "org/slf4j/impl/**"
@@ -267,8 +267,8 @@ project.task('validateShadedJarContainsSlf4jJdk14', dependsOn: 'shadowJar') {
 
 tasks.check.dependsOn project.tasks.validateShadedJarContainsSlf4jJdk14
 
-//TODO(BEAM-5657): checktyle task should be enabled in the future.
+//TODO(https://github.com/apache/beam/issues/19115): checkstyle task should be enabled in the future.
 checkstyleMain.enabled = false
 checkstyleTest.enabled = false
-//TODO(BEAM-5659): javadoc task should be enabled in the future.
+//TODO(https://github.com/apache/beam/issues/19119): javadoc task should be enabled in the future.
 javadoc.enabled = false
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/BatchModeExecutionContext.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/BatchModeExecutionContext.java
index f14afbc3779..efdfea0de01 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/BatchModeExecutionContext.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/BatchModeExecutionContext.java
@@ -68,7 +68,8 @@ public class BatchModeExecutionContext
 
   private final MetricsContainerRegistry<MetricsContainerImpl> containerRegistry;
 
-  // TODO(BEAM-7863): Move throttle time Metric to a dedicated namespace.
+  // TODO(https://github.com/apache/beam/issues/19632): Move throttle time Metric to a dedicated
+  // namespace.
   protected static final String DATASTORE_THROTTLE_TIME_NAMESPACE =
       "org.apache.beam.sdk.io.gcp.datastore.DatastoreV1$DatastoreWriterFn";
   protected static final String HTTP_CLIENT_API_THROTTLE_TIME_NAMESPACE =
@@ -533,7 +534,8 @@ public class BatchModeExecutionContext
   public Long extractThrottleTime() {
     long totalThrottleMsecs = 0L;
     for (MetricsContainerImpl container : containerRegistry.getContainers()) {
-      // TODO(BEAM-7863): Update throttling counters to use generic throttling-msecs metric.
+      // TODO(https://github.com/apache/beam/issues/19632): Update throttling counters to use
+      // generic throttling-msecs metric.
       CounterCell dataStoreThrottlingTime =
           container.tryGetCounter(
               MetricName.named(DATASTORE_THROTTLE_TIME_NAMESPACE, THROTTLE_TIME_COUNTER_NAME));
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorker.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorker.java
index 6552dd620e8..55b0f5d7c9f 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorker.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorker.java
@@ -209,7 +209,8 @@ public class StreamingDataflowWorker {
   /** Maximum number of failure stacktraces to report in each update sent to backend. */
   private static final int MAX_FAILURES_TO_REPORT_IN_UPDATE = 1000;
 
-  // TODO(BEAM-7863): Update throttling counters to use generic throttling-msecs metric.
+  // TODO(https://github.com/apache/beam/issues/19632): Update throttling counters to use generic
+  // throttling-msecs metric.
   public static final MetricName BIGQUERY_STREAMING_INSERT_THROTTLE_TIME =
       MetricName.named(
           "org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl$DatasetServiceImpl",
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/graph/CreateExecutableStageNodeFunction.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/graph/CreateExecutableStageNodeFunction.java
index 3e769f894e3..ee79ee39a46 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/graph/CreateExecutableStageNodeFunction.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/graph/CreateExecutableStageNodeFunction.java
@@ -257,7 +257,8 @@ public class CreateExecutableStageNodeFunction
                   .setSpec(RunnerApi.FunctionSpec.newBuilder().setPayload(output.toByteString()))
                   .build());
           // For non-java coder, hope it's GlobalWindows by default.
-          // TODO(BEAM-6231): Actually discover the right windowing strategy.
+          // TODO(https://github.com/apache/beam/issues/19363): Actually discover the right
+          // windowing strategy.
           windowingStrategyId = globalWindowingStrategyId;
         }
       } catch (IOException e) {
@@ -268,7 +269,8 @@ public class CreateExecutableStageNodeFunction
             e);
       }
 
-      // TODO(BEAM-6275): Set correct IsBounded on generated PCollections
+      // TODO(https://github.com/apache/beam/issues/19297): Set correct IsBounded on generated
+      // PCollections
       String pcollectionId = node.getPcollectionId();
       RunnerApi.PCollection pCollection =
           RunnerApi.PCollection.newBuilder()
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/IsmSideInputReaderTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/IsmSideInputReaderTest.java
index 35bc6292e44..5d4a058c3b2 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/IsmSideInputReaderTest.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/IsmSideInputReaderTest.java
@@ -1551,7 +1551,7 @@ public class IsmSideInputReaderTest {
     }
   }
 
-  // TODO(BEAM-13460): Add assertions on contains() calls
+  // TODO(https://github.com/apache/beam/issues/21294): Add assertions on contains() calls
   @SuppressWarnings("ReturnValueIgnored")
   private static <T> void verifyMap(
       Map<byte[], T> expectedMap, Map<byte[], T> mapView, Comparator<T> valueComparator) {
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorkerTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorkerTest.java
index 540b15061af..338a1d7eb14 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorkerTest.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorkerTest.java
@@ -172,7 +172,8 @@ import org.slf4j.LoggerFactory;
 
 /** Unit tests for {@link StreamingDataflowWorker}. */
 @RunWith(Parameterized.class)
-// TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of errorprone is
+// released (2.11.0)
 @SuppressWarnings("unused")
 public class StreamingDataflowWorkerTest {
 
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/UserParDoFnFactoryTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/UserParDoFnFactoryTest.java
index ca6e94b18ff..c8c46bf6cae 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/UserParDoFnFactoryTest.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/UserParDoFnFactoryTest.java
@@ -77,7 +77,8 @@ import org.junit.runners.JUnit4;
 
 /** Tests for {@link UserParDoFnFactory}. */
 @RunWith(JUnit4.class)
-// TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of errorprone is
+// released (2.11.0)
 @SuppressWarnings("unused")
 public class UserParDoFnFactoryTest {
   static class TestDoFn extends DoFn<Integer, String> {
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/SdkHarnessClient.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/SdkHarnessClient.java
index 4c6d88db2f8..9a9752a3e3c 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/SdkHarnessClient.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/SdkHarnessClient.java
@@ -512,9 +512,10 @@ public class SdkHarnessClient implements AutoCloseable {
               finalizationHandler.requestsFinalization(bundleId);
             }
           } else {
-            // TODO: [BEAM-3962] Handle aborting the bundle being processed.
+            // TODO: [https://github.com/apache/beam/issues/18756] Handle aborting the bundle being
+            // processed.
             throw new IllegalStateException(
-                "Processing bundle failed, TODO: [BEAM-3962] abort bundle.");
+                "Processing bundle failed, TODO: [https://github.com/apache/beam/issues/18756] abort bundle.");
           }
         } catch (Exception e) {
           if (exception == null) {
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/environment/DockerEnvironmentFactory.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/environment/DockerEnvironmentFactory.java
index edeaeab3393..ee816a944e5 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/environment/DockerEnvironmentFactory.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/environment/DockerEnvironmentFactory.java
@@ -179,7 +179,7 @@ public class DockerEnvironmentFactory implements EnvironmentFactory {
         firstNonNull(
             System.getenv("CLOUDSDK_CONFIG"),
             Paths.get(System.getProperty("user.home"), ".config", "gcloud").toString());
-    // TODO(BEAM-4729): Allow this to be disabled manually.
+    // TODO(https://github.com/apache/beam/issues/19061): Allow this to be disabled manually.
     if (Files.exists(Paths.get(localGcloudConfig))) {
       return ImmutableList.of(
           "--mount",
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/provisioning/StaticGrpcProvisionService.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/provisioning/StaticGrpcProvisionService.java
index b274890edf6..f6475603141 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/provisioning/StaticGrpcProvisionService.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/provisioning/StaticGrpcProvisionService.java
@@ -59,7 +59,7 @@ public class StaticGrpcProvisionService extends ProvisionServiceGrpc.ProvisionSe
       ProvisionApi.GetProvisionInfoRequest request,
       StreamObserver<GetProvisionInfoResponse> responseObserver) {
     if (!environments.containsKey(headerAccessor.getSdkWorkerId())) {
-      // TODO(BEAM-9818): Remove once the JRH is gone.
+      // TODO(https://github.com/apache/beam/issues/20253): Remove once the JRH is gone.
       responseObserver.onNext(GetProvisionInfoResponse.newBuilder().setInfo(info).build());
       responseObserver.onCompleted();
       return;
diff --git a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/ProcessBundleDescriptorsTest.java b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/ProcessBundleDescriptorsTest.java
index e6a3e3f6a7d..788fc38af54 100644
--- a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/ProcessBundleDescriptorsTest.java
+++ b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/ProcessBundleDescriptorsTest.java
@@ -63,7 +63,8 @@ import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterable
 import org.junit.Test;
 
 /** Tests for {@link ProcessBundleDescriptors}. */
-// TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of errorprone is
+// released (2.11.0)
 @SuppressWarnings("unused")
 public class ProcessBundleDescriptorsTest implements Serializable {
 
diff --git a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/RemoteExecutionTest.java b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/RemoteExecutionTest.java
index 396a67fb6ca..21052c9ff6a 100644
--- a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/RemoteExecutionTest.java
+++ b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/RemoteExecutionTest.java
@@ -162,7 +162,9 @@ import org.junit.runners.JUnit4;
 @SuppressWarnings({
   "rawtypes", // TODO(https://github.com/apache/beam/issues/20447)
   "keyfor",
-  "unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+  // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of
+  // errorprone is released (2.11.0)
+  "unused"
 })
 public class RemoteExecutionTest implements Serializable {
 
diff --git a/runners/samza/build.gradle b/runners/samza/build.gradle
index e4ae3c4f17d..528b4824eec 100644
--- a/runners/samza/build.gradle
+++ b/runners/samza/build.gradle
@@ -87,25 +87,25 @@ configurations.all {
 }
 
 def sickbayTests = [
-        // TODO(BEAM-12750)
+        // TODO(https://github.com/apache/beam/issues/21033)
         'org.apache.beam.sdk.transforms.GroupIntoBatchesTest.testInGlobalWindowBatchSizeByteSizeFn',
         'org.apache.beam.sdk.transforms.GroupIntoBatchesTest.testInStreamingMode',
         'org.apache.beam.sdk.transforms.GroupIntoBatchesTest.testWithShardedKeyInGlobalWindow',
-        // TODO(BEAM-12749)
+        // TODO(https://github.com/apache/beam/issues/21036)
         'org.apache.beam.sdk.transforms.MapElementsTest.testMapSimpleFunction',
-        // TODO(BEAM-12748)
+        // TODO(https://github.com/apache/beam/issues/21035)
         'org.apache.beam.sdk.transforms.ViewTest.testEmptySingletonSideInput',
         'org.apache.beam.sdk.transforms.ViewTest.testNonSingletonSideInput',
-        // TODO(BEAM-12747)
+        // TODO(https://github.com/apache/beam/issues/21037)
         'org.apache.beam.sdk.transforms.WithTimestampsTest.withTimestampsBackwardsInTimeShouldThrow',
         'org.apache.beam.sdk.transforms.WithTimestampsTest.withTimestampsWithNullTimestampShouldThrow',
-        // TODO(BEAM-12746)
+        // TODO(https://github.com/apache/beam/issues/21039)
         'org.apache.beam.sdk.io.FileIOTest*',
-        // TODO(BEAM-12745)
+        // TODO(https://github.com/apache/beam/issues/21038)
         'org.apache.beam.sdk.io.AvroIOTest*',
-        // TODO(BEAM-12744)
+        // TODO(https://github.com/apache/beam/issues/21040)
         'org.apache.beam.sdk.PipelineTest.testEmptyPipeline',
-        // TODO(BEAM-12743)
+        // TODO(https://github.com/apache/beam/issues/21041)
         'org.apache.beam.sdk.coders.PCollectionCustomCoderTest.testEncodingNPException',
         'org.apache.beam.sdk.coders.PCollectionCustomCoderTest.testEncodingIOException',
         'org.apache.beam.sdk.coders.PCollectionCustomCoderTest.testDecodingNPException',
@@ -148,13 +148,13 @@ tasks.register("validatesRunner", Test) {
     }
     // TODO(BEAM-10025)
     excludeTestsMatching 'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testOutputTimestampDefaultUnbounded'
-    // TODO(BEAM-11479)
+    // TODO(https://github.com/apache/beam/issues/20703)
     excludeTestsMatching 'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testOutputTimestamp'
-    // TODO(BEAM-11479)
+    // TODO(https://github.com/apache/beam/issues/20703)
     excludeTestsMatching 'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testRelativeTimerWithOutputTimestamp'
-    // TODO(BEAM-12035)
+    // TODO(https://github.com/apache/beam/issues/20847)
     excludeTestsMatching 'org.apache.beam.sdk.testing.TestStreamTest.testFirstElementLate'
-    // TODO(BEAM-12036)
+    // TODO(https://github.com/apache/beam/issues/20846)
     excludeTestsMatching 'org.apache.beam.sdk.testing.TestStreamTest.testLateDataAccumulating'
 
     // These tests fail since there is no support for side inputs in Samza's unbounded splittable DoFn integration
diff --git a/runners/samza/job-server/build.gradle b/runners/samza/job-server/build.gradle
index 8234c94bdef..c1c74701a6d 100644
--- a/runners/samza/job-server/build.gradle
+++ b/runners/samza/job-server/build.gradle
@@ -101,70 +101,70 @@ def portableValidatesRunnerTask(String name, boolean docker) {
             excludeCategories 'org.apache.beam.sdk.testing.UsesOrderedListState'
             excludeCategories 'org.apache.beam.sdk.testing.UsesBoundedSplittableParDo'
             excludeCategories 'org.apache.beam.sdk.testing.UsesTestStreamWithProcessingTime'
-            // TODO(BEAM-12821)
+            // TODO(https://github.com/apache/beam/issues/21023)
             excludeCategories 'org.apache.beam.sdk.testing.UsesTestStreamWithMultipleStages'
             excludeCategories 'org.apache.beam.sdk.testing.UsesUnboundedSplittableParDo'
             excludeCategories 'org.apache.beam.sdk.testing.UsesLoopingTimer'
         },
         testFilter: {
-            // TODO(BEAM-12677)
+            // TODO(https://github.com/apache/beam/issues/21042)
             excludeTestsMatching "org.apache.beam.sdk.transforms.FlattenTest.testFlattenWithDifferentInputAndOutputCoders2"
             excludeTestsMatching "org.apache.beam.sdk.transforms.FlattenTest.testEmptyFlattenAsSideInput"
             excludeTestsMatching "org.apache.beam.sdk.transforms.FlattenTest.testFlattenPCollectionsEmptyThenParDo"
             excludeTestsMatching "org.apache.beam.sdk.transforms.FlattenTest.testFlattenPCollectionsEmpty"
             // TODO(BEAM-10025)
             excludeTestsMatching 'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testOutputTimestampDefaultUnbounded'
-            // TODO(BEAM-11479)
+            // TODO(https://github.com/apache/beam/issues/20703)
             excludeTestsMatching 'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testOutputTimestamp'
-            // TODO(BEAM-12035)
+            // TODO(https://github.com/apache/beam/issues/20847)
             excludeTestsMatching 'org.apache.beam.sdk.testing.TestStreamTest.testFirstElementLate'
-            // TODO(BEAM-12036)
+            // TODO(https://github.com/apache/beam/issues/20846)
             excludeTestsMatching 'org.apache.beam.sdk.testing.TestStreamTest.testLateDataAccumulating'
-            // TODO(BEAM-12886)
+            // TODO(https://github.com/apache/beam/issues/21142)
             excludeTestsMatching 'org.apache.beam.sdk.transforms.GroupByKeyTest$WindowTests.testWindowFnPostMerging'
-            // TODO(BEAM-12887)
+            // TODO(https://github.com/apache/beam/issues/21143)
             excludeTestsMatching 'org.apache.beam.sdk.transforms.ParDoTest$TimestampTests.testParDoShiftTimestampInvalid'
-            // TODO(BEAM-12888)
+            // TODO(https://github.com/apache/beam/issues/21144)
             excludeTestsMatching 'org.apache.beam.sdk.transforms.ParDoTest$TimestampTests.testParDoShiftTimestampInvalidZeroAllowed'
-            // TODO(BEAM-12889)
+            // TODO(https://github.com/apache/beam/issues/21145)
             excludeTestsMatching 'org.apache.beam.sdk.transforms.DeduplicateTest.testEventTime'
-            // TODO(BEAM-12890)
+            // TODO(https://github.com/apache/beam/issues/21146)
             excludeTestsMatching 'org.apache.beam.sdk.io.TFRecordIOTest.testReadInvalidRecord'
-            // TODO(BEAM-12891)
+            // TODO(https://github.com/apache/beam/issues/21147)
             excludeTestsMatching 'org.apache.beam.sdk.io.TFRecordIOTest.testReadInvalidDataMask'
-            // TODO(BEAM-12892)
+            // TODO(https://github.com/apache/beam/issues/21148)
             excludeTestsMatching 'org.apache.beam.sdk.io.TFRecordIOTest.testReadInvalidLengthMask'
-            // TODO(BEAM-12893)
+            // TODO(https://github.com/apache/beam/issues/21149)
             excludeTestsMatching 'org.apache.beam.sdk.io.TextIOReadTest$CompressedReadTest.testCompressedReadWithoutExtension'
-            // TODO(BEAM-12894)
+            // TODO(https://github.com/apache/beam/issues/21150)
             excludeTestsMatching 'org.apache.beam.sdk.io.WriteFilesTest.testWithRunnerDeterminedShardingUnbounded'
-            // TODO(BEAM-128945)
+            // TODO(https://github.com/apache/beam/issues/21151)
             excludeTestsMatching 'org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests.testParDoWritingToUndeclaredTag'
-            // TODO(BEAM-12896)
+            // TODO(https://github.com/apache/beam/issues/21152)
             excludeTestsMatching 'org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests.testParDoReadingFromUnknownSideInput'
-            // TODO(BEAM-12897)
+            // TODO(https://github.com/apache/beam/issues/21153)
             excludeTestsMatching 'org.apache.beam.sdk.transforms.ViewTest.testMapSideInputWithNullValuesCatchesDuplicates'
 
-            // TODO(BEAM-12743)
+            // TODO(https://github.com/apache/beam/issues/21041)
             excludeTestsMatching 'org.apache.beam.sdk.coders.PCollectionCustomCoderTest.testEncodingNPException'
             excludeTestsMatching 'org.apache.beam.sdk.coders.PCollectionCustomCoderTest.testEncodingIOException'
             excludeTestsMatching 'org.apache.beam.sdk.coders.PCollectionCustomCoderTest.testDecodingNPException'
             excludeTestsMatching 'org.apache.beam.sdk.coders.PCollectionCustomCoderTest.testDecodingIOException'
-            // TODO(BEAM-12744)
+            // TODO(https://github.com/apache/beam/issues/21040)
             excludeTestsMatching 'org.apache.beam.sdk.PipelineTest.testEmptyPipeline'
-            // TODO(BEAM-12745)
+            // TODO(https://github.com/apache/beam/issues/21038)
             excludeTestsMatching 'org.apache.beam.sdk.io.AvroIOTest*'
-            // TODO(BEAM-12746)
+            // TODO(https://github.com/apache/beam/issues/21039)
             excludeTestsMatching 'org.apache.beam.sdk.io.FileIOTest*'
-            // TODO(BEAM-12747)
+            // TODO(https://github.com/apache/beam/issues/21037)
             excludeTestsMatching 'org.apache.beam.sdk.transforms.WithTimestampsTest.withTimestampsBackwardsInTimeShouldThrow'
             excludeTestsMatching 'org.apache.beam.sdk.transforms.WithTimestampsTest.withTimestampsWithNullTimestampShouldThrow'
-            // TODO(BEAM-12748)
+            // TODO(https://github.com/apache/beam/issues/21035)
             excludeTestsMatching 'org.apache.beam.sdk.transforms.ViewTest.testEmptySingletonSideInput'
             excludeTestsMatching 'org.apache.beam.sdk.transforms.ViewTest.testNonSingletonSideInput'
-            // TODO(BEAM-12749)
+            // TODO(https://github.com/apache/beam/issues/21036)
             excludeTestsMatching 'org.apache.beam.sdk.transforms.MapElementsTest.testMapSimpleFunction'
-            // TODO(BEAM-12750)
+            // TODO(https://github.com/apache/beam/issues/21033)
             excludeTestsMatching 'org.apache.beam.sdk.transforms.GroupIntoBatchesTest.testInGlobalWindowBatchSizeByteSizeFn'
             excludeTestsMatching 'org.apache.beam.sdk.transforms.GroupIntoBatchesTest.testInStreamingMode'
             excludeTestsMatching 'org.apache.beam.sdk.transforms.GroupIntoBatchesTest.testWithShardedKeyInGlobalWindow'
@@ -172,9 +172,9 @@ def portableValidatesRunnerTask(String name, boolean docker) {
             excludeTestsMatching 'org.apache.beam.sdk.transforms.GroupIntoBatchesTest.testInGlobalWindowBatchSizeByteSize'
             // TODO(BEAM-10025)
             excludeTestsMatching 'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testOutputTimestampDefaultUnbounded'
-            // TODO(BEAM-11479)
+            // TODO(https://github.com/apache/beam/issues/20703)
             excludeTestsMatching 'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testOutputTimestamp'
-            // TODO(BEAM-11479)
+            // TODO(https://github.com/apache/beam/issues/20703)
             excludeTestsMatching 'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testRelativeTimerWithOutputTimestamp'
             // TODO(BEAM-13498)
             excludeTestsMatching 'org.apache.beam.sdk.transforms.ParDoTest$TimestampTests.testProcessElementSkew'
diff --git a/runners/samza/src/main/java/org/apache/beam/runners/samza/SamzaRunner.java b/runners/samza/src/main/java/org/apache/beam/runners/samza/SamzaRunner.java
index dd7dd68627a..a45448b305e 100644
--- a/runners/samza/src/main/java/org/apache/beam/runners/samza/SamzaRunner.java
+++ b/runners/samza/src/main/java/org/apache/beam/runners/samza/SamzaRunner.java
@@ -114,8 +114,8 @@ public class SamzaRunner extends PipelineRunner<SamzaPipelineResult> {
 
   @Override
   public SamzaPipelineResult run(Pipeline pipeline) {
-    // TODO(BEAM-10670): Use SDF read as default for non-portable execution when we address
-    // performance issue.
+    // TODO(https://github.com/apache/beam/issues/20530): Use SDF read as default for non-portable
+    // execution when we address performance issue.
     if (!ExperimentalOptions.hasExperiment(pipeline.getOptions(), "beam_fn_api")) {
       SplittableParDo.convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(pipeline);
     }
diff --git a/runners/samza/src/main/java/org/apache/beam/runners/samza/metrics/SamzaMetricsContainer.java b/runners/samza/src/main/java/org/apache/beam/runners/samza/metrics/SamzaMetricsContainer.java
index d1ce4c1b257..7415735d44f 100644
--- a/runners/samza/src/main/java/org/apache/beam/runners/samza/metrics/SamzaMetricsContainer.java
+++ b/runners/samza/src/main/java/org/apache/beam/runners/samza/metrics/SamzaMetricsContainer.java
@@ -72,7 +72,7 @@ public class SamzaMetricsContainer {
     final GaugeUpdater updateGauge = new GaugeUpdater();
     results.getGauges().forEach(updateGauge);
 
-    // TODO(BEAM-12614): add distribution metrics to Samza
+    // TODO(https://github.com/apache/beam/issues/21043): add distribution metrics to Samza
   }
 
   private class CounterUpdater implements Consumer<MetricResult<Long>> {
diff --git a/runners/samza/src/main/java/org/apache/beam/runners/samza/translation/ReshuffleTranslator.java b/runners/samza/src/main/java/org/apache/beam/runners/samza/translation/ReshuffleTranslator.java
index c7f8acc22f5..62bc2224f35 100644
--- a/runners/samza/src/main/java/org/apache/beam/runners/samza/translation/ReshuffleTranslator.java
+++ b/runners/samza/src/main/java/org/apache/beam/runners/samza/translation/ReshuffleTranslator.java
@@ -115,7 +115,7 @@ public class ReshuffleTranslator<K, InT, OutT>
     @Override
     public boolean test(RunnerApi.PTransform pTransform) {
       return false;
-      // Re-enable after BEAM-12999 is completed
+      // Re-enable after https://github.com/apache/beam/issues/21188 is completed
       //       return PTransformTranslation.RESHUFFLE_URN.equals(
       //          PTransformTranslation.urnForTransformOrNull(pTransform));
     }
diff --git a/runners/samza/src/main/java/org/apache/beam/runners/samza/translation/SamzaPortablePipelineTranslator.java b/runners/samza/src/main/java/org/apache/beam/runners/samza/translation/SamzaPortablePipelineTranslator.java
index b3218eff5bf..e5bc2cd8f91 100644
--- a/runners/samza/src/main/java/org/apache/beam/runners/samza/translation/SamzaPortablePipelineTranslator.java
+++ b/runners/samza/src/main/java/org/apache/beam/runners/samza/translation/SamzaPortablePipelineTranslator.java
@@ -103,7 +103,7 @@ public class SamzaPortablePipelineTranslator {
     @Override
     public Map<String, TransformTranslator<?>> getTransformTranslators() {
       return ImmutableMap.<String, TransformTranslator<?>>builder()
-          // Re-enable after BEAM-12999 is completed
+          // Re-enable after https://github.com/apache/beam/issues/21188 is completed
           //          .put(PTransformTranslation.RESHUFFLE_URN, new ReshuffleTranslator<>())
           .put(PTransformTranslation.GROUP_BY_KEY_TRANSFORM_URN, new GroupByKeyTranslator<>())
           .put(PTransformTranslation.FLATTEN_TRANSFORM_URN, new FlattenPCollectionsTranslator<>())
diff --git a/runners/samza/src/main/java/org/apache/beam/runners/samza/translation/SamzaTransformOverrides.java b/runners/samza/src/main/java/org/apache/beam/runners/samza/translation/SamzaTransformOverrides.java
index 94100689caa..df18ada4984 100644
--- a/runners/samza/src/main/java/org/apache/beam/runners/samza/translation/SamzaTransformOverrides.java
+++ b/runners/samza/src/main/java/org/apache/beam/runners/samza/translation/SamzaTransformOverrides.java
@@ -50,7 +50,8 @@ public class SamzaTransformOverrides {
                 PTransformMatchers.splittableProcessKeyedBounded(),
                 new SplittableParDoNaiveBounded.OverrideFactory()))
 
-        // TODO: [BEAM-5362] Support @RequiresStableInput on Samza runner
+        // TODO: [https://github.com/apache/beam/issues/19132] Support @RequiresStableInput on Samza
+        // runner
         .add(
             PTransformOverride.of(
                 PTransformMatchers.requiresStableInputParDoMulti(),
diff --git a/runners/samza/src/test/java/org/apache/beam/runners/samza/runtime/SamzaStoreStateInternalsTest.java b/runners/samza/src/test/java/org/apache/beam/runners/samza/runtime/SamzaStoreStateInternalsTest.java
index e7116a4f0c2..457a43e53e4 100644
--- a/runners/samza/src/test/java/org/apache/beam/runners/samza/runtime/SamzaStoreStateInternalsTest.java
+++ b/runners/samza/src/test/java/org/apache/beam/runners/samza/runtime/SamzaStoreStateInternalsTest.java
@@ -79,7 +79,9 @@ import org.junit.Test;
 /** Tests for SamzaStoreStateInternals. */
 @SuppressWarnings({
   "rawtypes", // TODO(https://github.com/apache/beam/issues/20447)
-  "unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+  // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of
+  // errorprone is released (2.11.0)
+  "unused"
 })
 public class SamzaStoreStateInternalsTest implements Serializable {
   @Rule
diff --git a/runners/samza/src/test/java/org/apache/beam/runners/samza/translation/ConfigGeneratorTest.java b/runners/samza/src/test/java/org/apache/beam/runners/samza/translation/ConfigGeneratorTest.java
index dc41e2193ff..bb39c186a9f 100644
--- a/runners/samza/src/test/java/org/apache/beam/runners/samza/translation/ConfigGeneratorTest.java
+++ b/runners/samza/src/test/java/org/apache/beam/runners/samza/translation/ConfigGeneratorTest.java
@@ -55,7 +55,8 @@ import org.apache.samza.zk.ZkJobCoordinatorFactory;
 import org.junit.Test;
 
 /** Test config generations for {@link org.apache.beam.runners.samza.SamzaRunner}. */
-// TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of errorprone is
+// released (2.11.0)
 @SuppressWarnings("unused")
 public class ConfigGeneratorTest {
   private static final String APP_RUNNER_CLASS = "app.runner.class";
diff --git a/runners/spark/job-server/spark_job_server.gradle b/runners/spark/job-server/spark_job_server.gradle
index ea7df2c3c0f..d4bc26382ac 100644
--- a/runners/spark/job-server/spark_job_server.gradle
+++ b/runners/spark/job-server/spark_job_server.gradle
@@ -123,24 +123,24 @@ def portableValidatesRunnerTask(String name, boolean streaming, boolean docker,
         excludeCategories 'org.apache.beam.sdk.testing.UsesKeyInParDo'
         excludeCategories 'org.apache.beam.sdk.testing.UsesOnWindowExpiration'
         excludeCategories 'org.apache.beam.sdk.testing.UsesTestStream'
-        // TODO (BEAM-7222) SplittableDoFnTests
+        // TODO (https://github.com/apache/beam/issues/19468) SplittableDoFnTests
         excludeCategories 'org.apache.beam.sdk.testing.UsesBoundedSplittableParDo'
         excludeCategories 'org.apache.beam.sdk.testing.UsesUnboundedSplittableParDo'
         excludeCategories 'org.apache.beam.sdk.testing.UsesStrictTimerOrdering'
         excludeCategories 'org.apache.beam.sdk.testing.UsesBundleFinalizer'
         // Currently unsupported in portable streaming:
-        // TODO (BEAM-10712)
+        // TODO (https://github.com/apache/beam/issues/20395)
         excludeCategories 'org.apache.beam.sdk.testing.UsesSideInputs'
-        // TODO (BEAM-10754)
+        // TODO (https://github.com/apache/beam/issues/20396)
         excludeCategories 'org.apache.beam.sdk.testing.UsesStatefulParDo'
-        // TODO (BEAM-10755)
+        // TODO (https://github.com/apache/beam/issues/20397)
         excludeCategories 'org.apache.beam.sdk.testing.UsesTimersInParDo'
       }
 
       testFilter = {
-        // TODO (BEAM-10094)
+        // TODO (https://github.com/apache/beam/issues/20189)
         excludeTestsMatching 'org.apache.beam.sdk.transforms.FlattenTest.testFlattenWithDifferentInputAndOutputCoders2'
-        // TODO (BEAM-10784) Currently unsupported in portable streaming:
+        // TODO (https://github.com/apache/beam/issues/20429) Currently unsupported in portable streaming:
         // // Timeout error
         excludeTestsMatching 'org.apache.beam.sdk.testing.PAssertTest.testWindowedContainsInAnyOrder'
         excludeTestsMatching 'org.apache.beam.sdk.testing.PAssertTest.testWindowedSerializablePredicate'
@@ -182,14 +182,14 @@ def portableValidatesRunnerTask(String name, boolean streaming, boolean docker,
         excludeCategories 'org.apache.beam.sdk.testing.UsesKeyInParDo'
         excludeCategories 'org.apache.beam.sdk.testing.UsesOnWindowExpiration'
         excludeCategories 'org.apache.beam.sdk.testing.UsesTestStream'
-        // TODO (BEAM-7222) SplittableDoFnTests
+        // TODO (https://github.com/apache/beam/issues/19468) SplittableDoFnTests
         excludeCategories 'org.apache.beam.sdk.testing.UsesBoundedSplittableParDo'
         excludeCategories 'org.apache.beam.sdk.testing.UsesUnboundedSplittableParDo'
         excludeCategories 'org.apache.beam.sdk.testing.UsesStrictTimerOrdering'
         excludeCategories 'org.apache.beam.sdk.testing.UsesBundleFinalizer'
       }
       testFilter = {
-        // TODO (BEAM-10094)
+        // TODO (https://github.com/apache/beam/issues/20189)
         excludeTestsMatching 'org.apache.beam.sdk.transforms.FlattenTest.testFlattenWithDifferentInputAndOutputCoders2'
 
         for (String test : sickbayTests) {
diff --git a/runners/spark/spark_runner.gradle b/runners/spark/spark_runner.gradle
index 81b10f280c3..2fac13b0bf7 100644
--- a/runners/spark/spark_runner.gradle
+++ b/runners/spark/spark_runner.gradle
@@ -371,7 +371,7 @@ tasks.register("examplesIntegrationTest", Test) {
   testClassesDirs = files(project(":examples:java").sourceSets.test.output.classesDirs)
   useJUnit {
     filter {
-      // TODO (BEAM-14019) Fix integration Tests to run with SparkRunner: Failed to read from sharded output
+      // TODO (https://github.com/apache/beam/issues/21344) Fix integration Tests to run with SparkRunner: Failed to read from sharded output
       excludeTestsMatching 'org.apache.beam.examples.WindowedWordCountIT.testWindowedWordCountInStreamingStaticSharding'
       excludeTestsMatching 'org.apache.beam.examples.WindowedWordCountIT.testWindowedWordCountInBatchDynamicSharding'
     }
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkRunner.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkRunner.java
index 5dff798e7b0..f913f7cc033 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkRunner.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkRunner.java
@@ -156,7 +156,8 @@ public final class SparkRunner extends PipelineRunner<SparkPipelineResult> {
     detectTranslationMode(pipeline);
 
     // Default to using the primitive versions of Read.Bounded and Read.Unbounded.
-    // TODO(BEAM-10670): Use SDF read as default when we address performance issue.
+    // TODO(https://github.com/apache/beam/issues/20530): Use SDF read as default when we address
+    // performance issue.
     if (!ExperimentalOptions.hasExperiment(pipeline.getOptions(), "beam_fn_api")) {
       SplittableParDo.convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(pipeline);
     }
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkRunnerDebugger.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkRunnerDebugger.java
index a6f59dbfbd4..5f1da96e29d 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkRunnerDebugger.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkRunnerDebugger.java
@@ -82,7 +82,8 @@ public final class SparkRunnerDebugger extends PipelineRunner<SparkPipelineResul
         options.isStreaming() || options.as(TestSparkPipelineOptions.class).isForceStreaming();
 
     // Default to using the primitive versions of Read.Bounded and Read.Unbounded.
-    // TODO(BEAM-10670): Use SDF read as default when we address performance issue.
+    // TODO(https://github.com/apache/beam/issues/20530): Use SDF read as default when we address
+    // performance issue.
     if (!ExperimentalOptions.hasExperiment(pipeline.getOptions(), "beam_fn_api")) {
       SplittableParDo.convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(pipeline);
     }
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkTransformOverrides.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkTransformOverrides.java
index a536e59c723..748e754364b 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkTransformOverrides.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkTransformOverrides.java
@@ -34,7 +34,8 @@ import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Immutabl
 class SparkTransformOverrides {
   public static List<PTransformOverride> getDefaultOverrides(boolean streaming) {
     ImmutableList.Builder<PTransformOverride> builder = ImmutableList.builder();
-    // TODO: [BEAM-5358] Support @RequiresStableInput on Spark runner
+    // TODO: [https://github.com/apache/beam/issues/19107] Support @RequiresStableInput on Spark
+    // runner
     builder.add(
         PTransformOverride.of(
             PTransformMatchers.requiresStableInputParDoMulti(),
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/structuredstreaming/SparkStructuredStreamingRunner.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/structuredstreaming/SparkStructuredStreamingRunner.java
index 162c8f09cea..d66e0c77186 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/structuredstreaming/SparkStructuredStreamingRunner.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/structuredstreaming/SparkStructuredStreamingRunner.java
@@ -173,7 +173,8 @@ public final class SparkStructuredStreamingRunner
 
     // Default to using the primitive versions of Read.Bounded and Read.Unbounded for non-portable
     // execution.
-    // TODO(BEAM-10670): Use SDF read as default when we address performance issue.
+    // TODO(https://github.com/apache/beam/issues/20530): Use SDF read as default when we address
+    // performance issue.
     if (!ExperimentalOptions.hasExperiment(pipeline.getOptions(), "beam_fn_api")) {
       SplittableParDo.convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(pipeline);
     }
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/structuredstreaming/translation/SparkTransformOverrides.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/structuredstreaming/translation/SparkTransformOverrides.java
index 5760a81f309..996f60cb747 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/structuredstreaming/translation/SparkTransformOverrides.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/structuredstreaming/translation/SparkTransformOverrides.java
@@ -34,7 +34,8 @@ import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Immutabl
 class SparkTransformOverrides {
   public static List<PTransformOverride> getDefaultOverrides(boolean streaming) {
     ImmutableList.Builder<PTransformOverride> builder = ImmutableList.builder();
-    // TODO: [BEAM-5358] Support @RequiresStableInput on Spark runner
+    // TODO: [https://github.com/apache/beam/issues/19107] Support @RequiresStableInput on Spark
+    // runner
     builder.add(
         PTransformOverride.of(
             PTransformMatchers.requiresStableInputParDoMulti(),
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkStreamingPortablePipelineTranslator.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkStreamingPortablePipelineTranslator.java
index 1f61cdfbb64..08b418fa366 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkStreamingPortablePipelineTranslator.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkStreamingPortablePipelineTranslator.java
@@ -236,7 +236,7 @@ public class SparkStreamingPortablePipelineTranslator
     Coder windowCoder =
         getWindowingStrategy(inputPCollectionId, components).getWindowFn().windowCoder();
 
-    // TODO (BEAM-10712): handle side inputs.
+    // TODO (https://github.com/apache/beam/issues/20395): handle side inputs.
     if (stagePayload.getSideInputsCount() > 0) {
       throw new UnsupportedOperationException(
           "Side inputs to executable stage are currently unsupported.");
@@ -321,7 +321,8 @@ public class SparkStreamingPortablePipelineTranslator
           // create a single RDD stream.
           Queue<JavaRDD<WindowedValue<T>>> q = new LinkedBlockingQueue<>();
           q.offer(((BoundedDataset) dataset).getRDD());
-          // TODO (BEAM-10789): this is not recoverable from checkpoint!
+          // TODO (https://github.com/apache/beam/issues/20426): this is not recoverable from
+          // checkpoint!
           JavaDStream<WindowedValue<T>> dStream = context.getStreamingContext().queueStream(q);
           dStreams.add(dStream);
         }
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/streaming/StreamingTransformTranslator.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/streaming/StreamingTransformTranslator.java
index a8108105560..077b646697f 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/streaming/StreamingTransformTranslator.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/streaming/StreamingTransformTranslator.java
@@ -271,7 +271,8 @@ public final class StreamingTransformTranslator {
             // create a single RDD stream.
             Queue<JavaRDD<WindowedValue<T>>> q = new LinkedBlockingQueue<>();
             q.offer(((BoundedDataset) dataset).getRDD());
-            // TODO (BEAM-10789): this is not recoverable from checkpoint!
+            // TODO (https://github.com/apache/beam/issues/20426): this is not recoverable from
+            // checkpoint!
             JavaDStream<WindowedValue<T>> dStream = context.getStreamingContext().queueStream(q);
             dStreams.add(dStream);
           }
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/ResumeFromCheckpointStreamingTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/ResumeFromCheckpointStreamingTest.java
index 3767d5c1f33..bf197b3eeb7 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/ResumeFromCheckpointStreamingTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/ResumeFromCheckpointStreamingTest.java
@@ -352,7 +352,7 @@ public class ResumeFromCheckpointStreamingTest implements Serializable {
 
   /**
    * A custom PAssert that avoids using {@link org.apache.beam.sdk.transforms.Flatten} until
-   * BEAM-1444 is resolved.
+   * https://github.com/apache/beam/issues/18144 is resolved.
    */
   private static class PAssertWithoutFlatten<T>
       extends PTransform<PCollection<Iterable<T>>, PDone> {
diff --git a/runners/twister2/src/main/java/org/apache/beam/runners/twister2/Twister2Runner.java b/runners/twister2/src/main/java/org/apache/beam/runners/twister2/Twister2Runner.java
index d48bcb85b45..890172f8934 100644
--- a/runners/twister2/src/main/java/org/apache/beam/runners/twister2/Twister2Runner.java
+++ b/runners/twister2/src/main/java/org/apache/beam/runners/twister2/Twister2Runner.java
@@ -91,7 +91,8 @@ public class Twister2Runner extends PipelineRunner<PipelineResult> {
     LOG.info("Translating pipeline to Twister2 program.");
     pipeline.replaceAll(getDefaultOverrides());
 
-    // TODO(BEAM-10670): Use SDF read as default when we address performance issue.
+    // TODO(https://github.com/apache/beam/issues/20530): Use SDF read as default when we address
+    // performance issue.
     if (!ExperimentalOptions.hasExperiment(pipeline.getOptions(), "beam_fn_api")) {
       SplittableParDo.convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(pipeline);
     }
@@ -152,7 +153,8 @@ public class Twister2Runner extends PipelineRunner<PipelineResult> {
     LOG.info("Translating pipeline to Twister2 program.");
     pipeline.replaceAll(getDefaultOverrides());
 
-    // TODO(BEAM-10670): Use SDF read as default when we address performance issue.
+    // TODO(https://github.com/apache/beam/issues/20530): Use SDF read as default when we address
+    // performance issue.
     if (!ExperimentalOptions.hasExperiment(pipeline.getOptions(), "beam_fn_api")) {
       SplittableParDo.convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(pipeline);
     }