Posted to commits@beam.apache.org by bh...@apache.org on 2022/06/16 19:00:25 UTC

[beam] branch master updated: Update references to Jira to GH for the Java SDK (#21836)

This is an automated email from the ASF dual-hosted git repository.

bhulette pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/beam.git


The following commit(s) were added to refs/heads/master by this push:
     new b1634f0146c Update references to Jira to GH for the Java SDK (#21836)
b1634f0146c is described below

commit b1634f0146c58eb94b7b1d46d76a4244aa31062e
Author: Danny McCormick <da...@google.com>
AuthorDate: Thu Jun 16 15:00:18 2022 -0400

    Update references to Jira to GH for the Java SDK (#21836)
    
    * Update references to Jira to GH for the Java SDK
    
    * Fix broken links
    
    * spotless fixes
    
    * Spotless apply
    
    * Update to urls
    
    * Spotless apply
    
    * Spotless apply
    
    * Fix weird multiline formatting
    
    * Spacing
---
 sdks/java/container/boot.go                         |  2 +-
 .../apache/beam/sdk/coders/IterableLikeCoder.java   |  3 ++-
 .../java/org/apache/beam/sdk/io/WriteFiles.java     |  3 ++-
 .../org/apache/beam/sdk/metrics/MetricsFilter.java  |  4 ++--
 .../apache/beam/sdk/schemas/SchemaTranslation.java  | 13 +++++++------
 .../sdk/schemas/logicaltypes/MicrosInstant.java     |  2 +-
 .../beam/sdk/schemas/logicaltypes/NanosType.java    |  2 +-
 .../apache/beam/sdk/schemas/transforms/Cast.java    |  2 +-
 .../apache/beam/sdk/schemas/transforms/Select.java  |  2 +-
 .../sdk/schemas/utils/StaticSchemaInference.java    |  2 +-
 .../org/apache/beam/sdk/transforms/Deduplicate.java |  3 ++-
 .../java/org/apache/beam/sdk/transforms/DoFn.java   |  4 ++--
 .../beam/sdk/transforms/GroupIntoBatches.java       |  4 +++-
 .../java/org/apache/beam/sdk/transforms/Watch.java  |  4 ++--
 .../splittabledofn/GrowableOffsetRangeTracker.java  |  4 ++--
 .../java/org/apache/beam/sdk/util/ApiSurface.java   |  3 ++-
 .../org/apache/beam/sdk/util/HistogramData.java     | 14 ++++++++------
 .../apache/beam/sdk/util/PythonCallableSource.java  |  3 ++-
 .../apache/beam/sdk/values/PCollectionViews.java    |  6 ++++--
 .../org/apache/beam/sdk/coders/AvroCoderTest.java   |  2 +-
 .../beam/sdk/coders/PCollectionCustomCoderTest.java |  4 ++--
 .../org/apache/beam/sdk/io/LocalResourceIdTest.java |  2 +-
 .../test/java/org/apache/beam/sdk/io/ReadTest.java  |  3 ++-
 .../org/apache/beam/sdk/testing/TestStreamTest.java |  3 ++-
 .../beam/sdk/transforms/ParDoLifecycleTest.java     |  4 +++-
 .../apache/beam/sdk/transforms/ParDoSchemaTest.java |  3 ++-
 .../org/apache/beam/sdk/transforms/ParDoTest.java   |  4 +++-
 .../beam/sdk/transforms/PerKeyOrderingTest.java     |  3 ++-
 .../sdk/transforms/reflect/DoFnInvokersTest.java    |  4 +++-
 .../sdk/transforms/reflect/DoFnSignaturesTest.java  |  3 ++-
 .../sdk/transforms/reflect/OnTimerInvokersTest.java |  3 ++-
 .../reflect/testhelper/DoFnInvokersTestHelper.java  |  3 ++-
 .../sdk/expansion/service/ExpansionService.java     |  5 +++--
 .../beam/sdk/extensions/gcp/util/GcsUtil.java       |  3 ++-
 .../beam/sdk/extensions/ml/BatchRequestForDLP.java  |  3 ++-
 .../extensions/python/PythonExternalTransform.java  |  3 ++-
 .../python/PythonExternalTransformTest.java         |  2 +-
 .../beam/sdk/extensions/sbe/IrFieldGenerator.java   |  2 +-
 .../beam/sdk/extensions/sbe/PrimitiveSbeField.java  |  5 +++--
 sdks/java/extensions/sql/build.gradle               |  6 +++---
 .../provider/hcatalog/BeamSqlHiveSchemaTest.java    |  3 ++-
 .../sdk/extensions/sql/impl/ScalarFunctionImpl.java |  3 ++-
 .../sql/impl/parser/SqlCreateFunction.java          |  2 +-
 .../sdk/extensions/sql/impl/rel/BeamCalcRel.java    | 12 ++++++++----
 .../sdk/extensions/sql/impl/rel/BeamSortRel.java    |  6 ++++--
 .../meta/provider/SchemaIOTableProviderWrapper.java |  2 +-
 .../sql/meta/provider/avro/AvroTableProvider.java   |  3 ++-
 .../provider/bigquery/BeamSqlUnparseContext.java    |  2 +-
 .../sql/meta/provider/mongodb/MongoDbTable.java     |  6 ++++--
 .../meta/provider/pubsub/PubsubTableProvider.java   |  3 ++-
 .../beam/sdk/extensions/sql/BeamSqlExplainTest.java |  2 +-
 .../BeamSqlDateFunctionsIntegrationTest.java        |  2 +-
 .../meta/provider/kafka/KafkaTableProviderIT.java   |  3 ++-
 .../meta/provider/pubsub/PubsubTableProviderIT.java | 11 ++++++-----
 sdks/java/extensions/sql/zetasql/build.gradle       |  2 +-
 .../extensions/sql/zetasql/BeamZetaSqlCalcRel.java  |  7 ++++---
 .../extensions/sql/zetasql/BeamZetaSqlCatalog.java  |  4 ++--
 .../extensions/sql/zetasql/ZetaSQLQueryPlanner.java |  3 ++-
 .../sql/zetasql/ZetaSqlBeamTranslationUtils.java    |  3 ++-
 .../sql/zetasql/ZetaSqlCalciteTranslationUtils.java |  3 ++-
 .../sql/zetasql/translation/SqlOperators.java       |  2 +-
 .../extensions/sql/zetasql/StreamingSqlTest.java    |  3 ++-
 .../sql/zetasql/ZetaSqlDialectSpecTest.java         | 14 ++++++++------
 .../extensions/sql/zetasql/ZetaSqlJavaUdfTest.java  |  6 ++++--
 .../sql/zetasql/ZetaSqlNativeUdfTest.java           |  3 ++-
 .../sql/zetasql/ZetaSqlTimeFunctionsTest.java       | 21 ++++++++++++++-------
 .../beam/fn/harness/jmh/ProcessBundleBenchmark.java |  4 +++-
 .../apache/beam/fn/harness/AssignWindowsRunner.java |  3 ++-
 .../java/org/apache/beam/fn/harness/FnHarness.java  |  3 ++-
 .../harness/data/PCollectionConsumerRegistry.java   |  6 ++++--
 .../apache/beam/fn/harness/FnApiDoFnRunnerTest.java |  4 +++-
 .../harness/control/ProcessBundleHandlerTest.java   |  3 ++-
 .../org/apache/beam/sdk/io/aws/s3/S3FileSystem.java |  3 ++-
 .../apache/beam/sdk/io/aws2/s3/S3FileSystem.java    |  3 ++-
 .../apache/beam/sdk/io/clickhouse/ClickHouseIO.java |  2 +-
 .../apache/beam/sdk/io/gcp/bigquery/BatchLoads.java |  3 ++-
 .../apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java |  2 +-
 .../sdk/io/gcp/bigquery/BigQueryServicesImpl.java   |  6 +++---
 .../io/gcp/bigquery/BigQueryStorageArrowReader.java |  3 ++-
 .../sdk/io/gcp/bigquery/BigQueryStorageReader.java  |  3 ++-
 .../bigquery/StorageApiWritesShardedRecords.java    |  4 +++-
 .../beam/sdk/io/gcp/firestore/FirestoreV1.java      |  5 +++--
 .../apache/beam/sdk/io/gcp/healthcare/DicomIO.java  |  2 +-
 .../beam/sdk/io/gcp/pubsub/PubsubMessages.java      |  2 +-
 .../beam/sdk/io/gcp/pubsub/TestPubsubSignal.java    |  4 +++-
 .../apache/beam/sdk/io/gcp/spanner/SpannerIO.java   |  4 ++--
 .../io/gcp/spanner/SpannerTransformRegistrar.java   |  3 ++-
 .../beam/sdk/io/gcp/healthcare/DicomIOReadIT.java   |  2 +-
 .../beam/sdk/io/gcp/pubsublite/ReadWriteIT.java     |  6 +++---
 .../changestreams/SpannerChangeStreamErrorTest.java |  2 +-
 sdks/java/io/hcatalog/build.gradle                  |  2 +-
 .../beam/sdk/io/kafka/KafkaExactlyOnceSink.java     |  4 +++-
 .../java/org/apache/beam/sdk/io/kafka/KafkaIO.java  |  8 +++++---
 .../KafkaIOReadImplementationCompatibility.java     |  2 +-
 .../apache/beam/sdk/io/kafka/ReadFromKafkaDoFn.java |  2 +-
 .../sdk/io/kafka/WatchKafkaTopicPartitionDoFn.java  |  4 +++-
 .../org/apache/beam/sdk/io/solr/SolrIOTest.java     |  2 +-
 .../beam/sdk/io/splunk/SplunkEventWriter.java       |  4 +++-
 sdks/java/testing/nexmark/build.gradle              |  2 +-
 .../org/apache/beam/sdk/nexmark/NexmarkUtils.java   |  4 +++-
 .../apache/beam/sdk/nexmark/queries/Query10.java    |  3 ++-
 .../org/apache/beam/sdk/nexmark/queries/Query3.java |  4 +++-
 .../beam/sdk/nexmark/queries/SqlQueryTest.java      |  4 ++--
 .../beam/sdk/nexmark/queries/sql/SqlQuery5Test.java |  2 +-
 .../nexmark/sources/UnboundedEventSourceTest.java   |  2 +-
 sdks/java/testing/tpcds/build.gradle                |  2 +-
 106 files changed, 254 insertions(+), 160 deletions(-)
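
Every hunk that follows applies the same mechanical rewrite: a Jira key such as BEAM-9455 is replaced by its migrated GitHub issue URL, and the now-longer comments are re-wrapped by spotless (the "Spotless apply" steps above). A minimal sketch of that rewrite in Java, assuming a hand-built key-to-issue map; the JiraTodoMigrator class, its map contents, and the migrateTodos helper are illustrative, not part of this commit:

    import java.util.Map;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class JiraTodoMigrator {
      // Illustrative subset of the key-to-issue mapping visible in the hunks below.
      private static final Map<String, Integer> JIRA_TO_GITHUB =
          Map.of("BEAM-9455", 20009, "BEAM-1537", 18169, "BEAM-13271", 21230);

      private static final Pattern JIRA_KEY = Pattern.compile("BEAM-\\d+");

      /** Replaces each mapped Jira key in a source line with its GitHub issue URL. */
      static String migrateTodos(String line) {
        Matcher m = JIRA_KEY.matcher(line);
        StringBuilder out = new StringBuilder();
        while (m.find()) {
          Integer issue = JIRA_TO_GITHUB.get(m.group());
          m.appendReplacement(
              out,
              issue == null
                  ? m.group() // leave unmapped keys for a later pass
                  : "https://github.com/apache/beam/issues/" + issue);
        }
        m.appendTail(out);
        return out.toString();
      }

      public static void main(String[] args) {
        // Prints: // TODO(https://github.com/apache/beam/issues/20009): ...
        System.out.println(migrateTodos("// TODO(BEAM-9455): ..."));
      }
    }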

diff --git a/sdks/java/container/boot.go b/sdks/java/container/boot.go
index d067db2b304..d0411331874 100644
--- a/sdks/java/container/boot.go
+++ b/sdks/java/container/boot.go
@@ -112,7 +112,7 @@ func main() {
 	// Using the SDK Harness ID in the artifact destination path to make sure that dependencies used by multiple
 	// SDK Harnesses in the same VM do not conflict. This is needed since some runners (for example, Dataflow)
 	// may share the artifact staging directory across multiple SDK Harnesses
-	// TODO(BEAM-9455): consider removing the SDK Harness ID from the staging path after Dataflow can properly
+	// TODO(https://github.com/apache/beam/issues/20009): consider removing the SDK Harness ID from the staging path after Dataflow can properly
 	// separate out dependencies per environment.
 	dir := filepath.Join(*semiPersistDir, *id, "staged")
 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/IterableLikeCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/IterableLikeCoder.java
index 388430c9917..4e387e4a929 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/IterableLikeCoder.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/IterableLikeCoder.java
@@ -209,7 +209,8 @@ public abstract class IterableLikeCoder<T, IterableT extends Iterable<T>>
           elementCoder.registerByteSizeObserver(elem, observer);
         }
       } else {
-        // TODO: (BEAM-1537) Update to use an accurate count depending on size and count,
+        // TODO: (https://github.com/apache/beam/issues/18169) Update to use an accurate count
+        // depending on size and count,
         // currently we are underestimating the size by up to 10 bytes per block of data since we
         // are not encoding the count prefix which occurs at most once per 64k of data and is up to
         // 10 bytes long. Since we include the total count we can upper bound the underestimate
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/WriteFiles.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/WriteFiles.java
index 4019474c0e7..8d660a99ade 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/WriteFiles.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/WriteFiles.java
@@ -745,7 +745,8 @@ public abstract class WriteFiles<UserT, DestinationT, OutputT>
       // records buffered or they have been buffered for a certain time, controlled by
       // FILE_TRIGGERING_RECORD_COUNT and BUFFERING_DURATION respectively.
       //
-      // TODO(BEAM-12040): The implementation doesn't currently work with merging windows.
+      // TODO(https://github.com/apache/beam/issues/20928): The implementation doesn't currently
+      // work with merging windows.
       PCollection<KV<org.apache.beam.sdk.util.ShardedKey<Integer>, Iterable<UserT>>> shardedInput =
           input
               .apply(
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/metrics/MetricsFilter.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/metrics/MetricsFilter.java
index c08403e224b..d1336790e1f 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/metrics/MetricsFilter.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/metrics/MetricsFilter.java
@@ -81,8 +81,8 @@ public abstract class MetricsFilter {
      * as "foo", "bar" or "foo/bar". However, each component of the step name must be completely
      * matched, so the filter "foo" will not match a step name such as "fool/bar/foot"
      *
-     * <p>TODO(BEAM-12154): Beam does not guarantee a specific format for step IDs hence we should
-     * not assume a "foo/bar/baz" format here.
+     * <p>TODO(https://github.com/apache/beam/issues/20919): Beam does not guarantee a specific
+     * format for step IDs hence we should not assume a "foo/bar/baz" format here.
      */
     public Builder addStep(String step) {
       immutableStepsBuilder().add(step);
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/SchemaTranslation.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/SchemaTranslation.java
index fa7dc8d01dd..5d92f47fc17 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/SchemaTranslation.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/SchemaTranslation.java
@@ -68,7 +68,8 @@ public class SchemaTranslation {
   private static final String URN_BEAM_LOGICAL_DECIMAL = "beam:logical_type:decimal:v1";
   private static final String URN_BEAM_LOGICAL_JAVASDK = "beam:logical_type:javasdk:v1";
 
-  // TODO(BEAM-7855): Populate this with a LogicalTypeRegistrar, which includes a way to construct
+  // TODO(https://github.com/apache/beam/issues/19715): Populate this with a LogicalTypeRegistrar,
+  // which includes a way to construct
   // the LogicalType given an argument.
   private static final ImmutableMap<String, Class<? extends LogicalType<?, ?>>>
       STANDARD_LOGICAL_TYPES =
@@ -171,9 +172,9 @@ public class SchemaTranslation {
               SchemaApi.LogicalType.newBuilder()
                   .setRepresentation(
                       fieldTypeToProto(logicalType.getBaseType(), serializeLogicalType))
-                  // TODO(BEAM-7855): "javasdk" types should only be a last resort. Types defined in
-                  // Beam should have their own URN, and there should be a mechanism for users to
-                  // register their own types by URN.
+                  // TODO(https://github.com/apache/beam/issues/19715): "javasdk" types should only
+                  // be a last resort. Types defined in Beam should have their own URN, and there
+                  // should be a mechanism for users to register their own types by URN.
                   .setUrn(URN_BEAM_LOGICAL_JAVASDK);
           if (logicalType.getArgumentType() != null) {
             logicalTypeBuilder =
@@ -193,7 +194,7 @@ public class SchemaTranslation {
         builder.setLogicalType(logicalTypeBuilder.build());
         break;
         // Special-case for DATETIME and DECIMAL which are logical types in portable representation,
-        // but not yet in Java. (BEAM-7554)
+        // but not yet in Java. (https://github.com/apache/beam/issues/19817)
       case DATETIME:
         builder.setLogicalType(
             SchemaApi.LogicalType.newBuilder()
@@ -356,7 +357,7 @@ public class SchemaTranslation {
           }
         }
         // Special-case for DATETIME and DECIMAL which are logical types in portable representation,
-        // but not yet in Java. (BEAM-7554)
+        // but not yet in Java. (https://github.com/apache/beam/issues/19817)
         if (urn.equals(URN_BEAM_LOGICAL_DATETIME)) {
           return FieldType.DATETIME;
         } else if (urn.equals(URN_BEAM_LOGICAL_DECIMAL)) {
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/logicaltypes/MicrosInstant.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/logicaltypes/MicrosInstant.java
index b896f147a64..90cd2587fde 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/logicaltypes/MicrosInstant.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/logicaltypes/MicrosInstant.java
@@ -43,7 +43,7 @@ public class MicrosInstant implements Schema.LogicalType<Instant, Row> {
           .getValueDescriptor()
           .getOptions()
           .getExtension(RunnerApi.beamUrn);
-  // TODO(BEAM-10878): This should be a constant
+  // TODO(https://github.com/apache/beam/issues/20540): This should be a constant
   private final Schema schema;
 
   public MicrosInstant() {
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/logicaltypes/NanosType.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/logicaltypes/NanosType.java
index 07af7985b07..9f06483a5aa 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/logicaltypes/NanosType.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/logicaltypes/NanosType.java
@@ -23,7 +23,7 @@ import org.apache.beam.sdk.values.Row;
 /** Base class for types representing timestamps or durations as nanoseconds. */
 abstract class NanosType<T> implements Schema.LogicalType<T, Row> {
   private final String identifier;
-  // TODO(BEAM-10878): This should be a constant
+  // TODO(https://github.com/apache/beam/issues/20540): This should be a constant
   protected final Schema schema;
 
   NanosType(String identifier) {
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/transforms/Cast.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/transforms/Cast.java
index 7e2eda89cd5..3be08c577e6 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/transforms/Cast.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/transforms/Cast.java
@@ -298,7 +298,7 @@ public abstract class Cast<T> extends PTransform<PCollection<T>, PCollection<Row
                 new DoFn<T, Row>() {
                   // TODO: This should be the same as resolved so that Beam knows which fields
                   // are being accessed. Currently Beam only supports wildcard descriptors.
-                  // Once BEAM-4457 is fixed, fix this.
+                  // Once https://github.com/apache/beam/issues/18903 is fixed, fix this.
                   @FieldAccess("filterFields")
                   final FieldAccessDescriptor fieldAccessDescriptor =
                       FieldAccessDescriptor.withAllFields();
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/transforms/Select.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/transforms/Select.java
index 8ccc932edbc..163506fcf75 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/transforms/Select.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/transforms/Select.java
@@ -133,7 +133,7 @@ public class Select {
 
     // TODO: This should be the same as resolved so that Beam knows which fields
     // are being accessed. Currently Beam only supports wildcard descriptors.
-    // Once BEAM-4457 is fixed, fix this.
+    // Once https://github.com/apache/beam/issues/18903 is fixed, fix this.
     @FieldAccess("selectFields")
     final FieldAccessDescriptor fieldAccess = FieldAccessDescriptor.withAllFields();
 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/utils/StaticSchemaInference.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/utils/StaticSchemaInference.java
index beb72606f61..737c96c463f 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/utils/StaticSchemaInference.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/utils/StaticSchemaInference.java
@@ -126,7 +126,7 @@ public class StaticSchemaInference {
     return fieldFromType(type, fieldValueTypeSupplier, new HashMap<Class, Schema>());
   }
 
-  // TODO(BEAM-14458): support type inference for logical types
+  // TODO(https://github.com/apache/beam/issues/21567): support type inference for logical types
   private static Schema.FieldType fieldFromType(
       TypeDescriptor type,
       FieldValueTypeSupplier fieldValueTypeSupplier,
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Deduplicate.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Deduplicate.java
index bbbcb859aef..b3ea4ddd6af 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Deduplicate.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Deduplicate.java
@@ -64,7 +64,8 @@ import org.joda.time.Duration;
  *     words.apply(Deduplicate.<String>values());
  * }</pre>
  */
-// TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of errorprone is
+// released (2.11.0)
 @SuppressWarnings("unused")
 public final class Deduplicate {
   /** The default is the {@link TimeDomain#PROCESSING_TIME processing time domain}. */
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java
index 503739a3551..487c78083a7 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java
@@ -859,8 +859,8 @@ public abstract class DoFn<InputT extends @Nullable Object, OutputT extends @Nul
    *       commits the output of this bundle. See <a
    *       href="https://s.apache.org/beam-finalizing-bundles">Apache Beam Portability API: How to
    *       Finalize Bundles</a> for further details.
-   *   <li>TODO(BEAM-1287): Add support for an {@link OutputReceiver} and {@link
-   *       MultiOutputReceiver} that can output to a window.
+   *   <li>TODO(https://github.com/apache/beam/issues/18203): Add support for an {@link
+   *       OutputReceiver} and {@link MultiOutputReceiver} that can output to a window.
    * </ul>
    *
    * <p>Note that {@link FinishBundle @FinishBundle} is invoked before the runner commits the output
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/GroupIntoBatches.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/GroupIntoBatches.java
index fe013cb929b..b1e685e4c19 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/GroupIntoBatches.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/GroupIntoBatches.java
@@ -104,7 +104,9 @@ import org.slf4j.LoggerFactory;
 @SuppressWarnings({
   "nullness", // TODO(https://github.com/apache/beam/issues/20497)
   "rawtypes",
-  "unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+  // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of
+  // errorprone is released (2.11.0)
+  "unused"
 })
 public class GroupIntoBatches<K, InputT>
     extends PTransform<PCollection<KV<K, InputT>>, PCollection<KV<K, Iterable<InputT>>>> {
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Watch.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Watch.java
index 81385b28758..0c92cf6eacd 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Watch.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Watch.java
@@ -1112,8 +1112,8 @@ public class Watch {
 
     @Override
     public SplitResult<GrowthState> trySplit(double fractionOfRemainder) {
-      // TODO(BEAM-8873): Add support for splitting off a fixed amount of work for this restriction
-      // instead of only supporting checkpointing.
+      // TODO(https://github.com/apache/beam/issues/19908): Add support for splitting off a fixed
+      // amount of work for this restriction instead of only supporting checkpointing.
 
       // residual should contain exactly the work *not* claimed in the current ProcessElement call -
       // unclaimed pending outputs or future polling output
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/splittabledofn/GrowableOffsetRangeTracker.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/splittabledofn/GrowableOffsetRangeTracker.java
index 5bc512773a0..b4f7ff6eef4 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/splittabledofn/GrowableOffsetRangeTracker.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/splittabledofn/GrowableOffsetRangeTracker.java
@@ -53,8 +53,8 @@ public class GrowableOffsetRangeTracker extends OffsetRangeTracker {
    * <p>If {@link #estimate} is expensive to compute, consider wrapping the implementation with
    * {@link Suppliers#memoizeWithExpiration} or equivalent as an optimization.
    *
-   * <p>TODO(BEAM-10032): Also consider using {@link RangeEndEstimator} when the range is not ended
-   * with {@code Long.MAX_VALUE}.
+   * <p>TODO(https://github.com/apache/beam/issues/20227): Also consider using {@link
+   * RangeEndEstimator} when the range is not ended with {@code Long.MAX_VALUE}.
    */
   @FunctionalInterface
   public interface RangeEndEstimator {
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ApiSurface.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ApiSurface.java
index 8aa9327aea2..6a5107ad616 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ApiSurface.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ApiSurface.java
@@ -321,7 +321,8 @@ public class ApiSurface {
       try {
         clazz = classInfo.load();
       } catch (NoClassDefFoundError e) {
-        // TODO: Ignore any NoClassDefFoundError errors as a workaround. (BEAM-2231)
+        // TODO: Ignore any NoClassDefFoundError errors as a workaround.
+        // (https://github.com/apache/beam/issues/18368)
         LOG.warn("Failed to load class: {}", classInfo.toString(), e);
         continue;
       }
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/HistogramData.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/HistogramData.java
index 7eae8a65791..3edc5239e23 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/HistogramData.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/HistogramData.java
@@ -43,7 +43,8 @@ public class HistogramData implements Serializable {
 
   private final BucketType bucketType;
 
-  // TODO(BEAM-12103): Update this function to remove the numTopRecords and numBottomRecords
+  // TODO(https://github.com/apache/beam/issues/20853): Update this function to remove the
+  // numTopRecords and numBottomRecords
   // and include those counters in the buckets array.
   private long[] buckets;
   private long numBoundedBucketRecords;
@@ -68,8 +69,8 @@ public class HistogramData implements Serializable {
   }
 
   /**
-   * TODO(BEAM-12103): Update this function to define numBuckets total, including the infinite
-   * buckets. Create a histogram with linear buckets.
+   * TODO(https://github.com/apache/beam/issues/20853): Update this function to define numBuckets
+   * total, including the infinite buckets. Create a histogram with linear buckets.
    *
    * @param start Lower bound of a starting bucket.
    * @param width Bucket width. Smaller width implies a better resolution for percentile estimation.
@@ -103,7 +104,8 @@ public class HistogramData implements Serializable {
     }
   }
 
-  // TODO(BEAM-12103): Update this function to allow incrementing the infinite buckets as well.
+  // TODO(https://github.com/apache/beam/issues/20853): Update this function to allow incrementing
+  // the infinite buckets as well.
   // and remove the incTopBucketCount and incBotBucketCount methods.
   // Using 0 and length -1 as the bucketIndex.
   public synchronized void incBucketCount(int bucketIndex, long count) {
@@ -150,8 +152,8 @@ public class HistogramData implements Serializable {
   }
 
   /**
-   * TODO(BEAM-12103): Update this function to allow indexing the -INF and INF bucket (using 0 and
-   * length -1) Get the bucket count for the given bucketIndex.
+   * TODO(https://github.com/apache/beam/issues/20853): Update this function to allow indexing the
+   * -INF and INF bucket (using 0 and length -1) Get the bucket count for the given bucketIndex.
    *
    * <p>This method does not guarantee the atomicity when sequentially accessing the multiple
    * buckets i.e. other threads may alter the value between consecutive invocations. For summing the
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PythonCallableSource.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PythonCallableSource.java
index 62a2cd727f1..069c3d9e451 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PythonCallableSource.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PythonCallableSource.java
@@ -50,7 +50,8 @@ public class PythonCallableSource implements Serializable {
   }
 
   public static PythonCallableSource of(String pythonCallableCode) {
-    // TODO(BEAM-14457): check syntactic correctness of Python code if possible
+    // TODO(https://github.com/apache/beam/issues/21568): check syntactic correctness of Python code
+    // if possible
     return new PythonCallableSource(pythonCallableCode);
   }
 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionViews.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionViews.java
index 21b06cd6e48..481962c5e01 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionViews.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionViews.java
@@ -1042,7 +1042,8 @@ public class PCollectionViews {
 
     @Override
     public Map<K, Iterable<V>> apply(MultimapView<Void, KV<K, V>> primitiveViewT) {
-      // TODO: BEAM-3071 - fix this so that we aren't relying on Java equality and are
+      // TODO: https://github.com/apache/beam/issues/18569 - fix this so that we aren't relying on
+      // Java equality and are
       // using structural value equality.
       Multimap<K, V> multimap = ArrayListMultimap.create();
       for (KV<K, V> elem : primitiveViewT.get(null)) {
@@ -1127,7 +1128,8 @@ public class PCollectionViews {
 
     @Override
     public Map<K, V> apply(MultimapView<Void, KV<K, V>> primitiveViewT) {
-      // TODO: BEAM-3071 - fix this so that we aren't relying on Java equality and are
+      // TODO: https://github.com/apache/beam/issues/18569 - fix this so that we aren't relying on
+      // Java equality and are
       // using structural value equality.
       Map<K, V> map = new HashMap<>();
       for (KV<K, V> elem : primitiveViewT.get(null)) {
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/AvroCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/AvroCoderTest.java
index 1827a75c48d..76e1568dfab 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/AvroCoderTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/AvroCoderTest.java
@@ -249,7 +249,7 @@ public class AvroCoderTest {
 
   /**
    * Confirm that we can serialize and deserialize an AvroCoder object and still decode after.
-   * (BEAM-349).
+   * (https://github.com/apache/beam/issues/18022).
    *
    * @throws Exception
    */
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/PCollectionCustomCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/PCollectionCustomCoderTest.java
index 37e6f217197..16f0225d2f9 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/PCollectionCustomCoderTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/PCollectionCustomCoderTest.java
@@ -220,7 +220,7 @@ public class PCollectionCustomCoderTest {
     p.run().waitUntilFinish();
   }
 
-  // TODO(BEAM-6004) Have DirectRunner trigger deserialization.
+  // TODO(https://github.com/apache/beam/issues/19185) Have DirectRunner trigger deserialization.
   @Ignore("DirectRunner doesn't decode coders so this test does not pass.")
   @Test
   @Category(NeedsRunner.class)
@@ -232,7 +232,7 @@ public class PCollectionCustomCoderTest {
     p.run().waitUntilFinish();
   }
 
-  // TODO(BEAM-6004) Have DirectRunner trigger deserialization.
+  // TODO(https://github.com/apache/beam/issues/19185) Have DirectRunner trigger deserialization.
   @Ignore("DirectRunner doesn't decode coders so this test does not pass.")
   @Test
   @Category(NeedsRunner.class)
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/LocalResourceIdTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/LocalResourceIdTest.java
index 94c7121ccc6..6c519905e82 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/LocalResourceIdTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/LocalResourceIdTest.java
@@ -45,7 +45,7 @@ import org.junit.runners.JUnit4;
 /**
  * Tests for {@link LocalResourceId}.
  *
- * <p>TODO: re-enable unicode tests when BEAM-1453 is resolved.
+ * <p>TODO: re-enable unicode tests when https://github.com/apache/beam/issues/18213 is resolved.
  */
 @RunWith(JUnit4.class)
 public class LocalResourceIdTest {
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/ReadTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/ReadTest.java
index a8c1ee9d79e..315228e7768 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/ReadTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/ReadTest.java
@@ -176,7 +176,8 @@ public class ReadTest implements Serializable {
     PAssert.that(input)
         .containsInAnyOrder(
             LongStream.rangeClosed(1L, numElements).boxed().collect(Collectors.toList()));
-    // TODO(BEAM-10670): Remove additional experiments when SDF read is default.
+    // TODO(https://github.com/apache/beam/issues/20530): Remove additional experiments when SDF
+    // read is default.
     ExperimentalOptions.addExperiment(
         pipeline.getOptions().as(ExperimentalOptions.class), "use_sdf_read");
     // Force the pipeline to run with one thread to ensure the reader will be reused on one DoFn
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/TestStreamTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/TestStreamTest.java
index 0a61e480d78..98f42f3b952 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/TestStreamTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/TestStreamTest.java
@@ -85,7 +85,8 @@ import org.junit.runners.JUnit4;
 
 /** Tests for {@link TestStream}. */
 @RunWith(JUnit4.class)
-// TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of errorprone is
+// released (2.11.0)
 @SuppressWarnings("unused")
 public class TestStreamTest implements Serializable {
   @Rule public transient TestPipeline p = TestPipeline.create();
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoLifecycleTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoLifecycleTest.java
index 097705e84f2..dee441aefe8 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoLifecycleTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoLifecycleTest.java
@@ -64,7 +64,9 @@ import org.junit.runners.JUnit4;
 @RunWith(JUnit4.class)
 @SuppressWarnings({
   "rawtypes", // TODO(https://github.com/apache/beam/issues/20447)
-  "unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+  // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of
+  // errorprone is released (2.11.0)
+  "unused"
 })
 public class ParDoLifecycleTest implements Serializable {
 
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoSchemaTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoSchemaTest.java
index 6b2a2b4cdd9..0d560bc026d 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoSchemaTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoSchemaTest.java
@@ -70,7 +70,8 @@ import org.junit.runners.JUnit4;
 /** Test {@link Schema} support. */
 @RunWith(JUnit4.class)
 @Category(UsesSchema.class)
-// TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of errorprone is
+// released (2.11.0)
 @SuppressWarnings("unused")
 public class ParDoSchemaTest implements Serializable {
   @Rule public final transient TestPipeline pipeline = TestPipeline.create();
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java
index 44bf0f66b62..7e724e0e74d 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java
@@ -173,7 +173,9 @@ import org.junit.runners.JUnit4;
 /** Tests for ParDo. */
 @SuppressWarnings({
   "rawtypes", // TODO(https://github.com/apache/beam/issues/20447)
-  "unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+  // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of
+  // errorprone is released (2.11.0)
+  "unused"
 })
 public class ParDoTest implements Serializable {
   // This test is Serializable, just so that it's easy to have
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/PerKeyOrderingTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/PerKeyOrderingTest.java
index b66b1cebd5b..f15c8b2e941 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/PerKeyOrderingTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/PerKeyOrderingTest.java
@@ -47,7 +47,8 @@ import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
 
 @SuppressWarnings(
-    "unused") // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+    "unused") // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of
+// errorprone is released (2.11.0)
 @RunWith(JUnit4.class)
 public class PerKeyOrderingTest implements Serializable {
   @Rule public final transient TestPipeline pipeline = TestPipeline.create();
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/DoFnInvokersTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/DoFnInvokersTest.java
index 0daa2798549..32ee1e2c6f1 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/DoFnInvokersTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/DoFnInvokersTest.java
@@ -93,7 +93,9 @@ import org.mockito.MockitoAnnotations;
 @SuppressWarnings({
   "rawtypes", // TODO(https://github.com/apache/beam/issues/20447)
   "SameNameButDifferent",
-  "unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+  // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of
+  // errorprone is released (2.11.0)
+  "unused"
 })
 public class DoFnInvokersTest {
   @Rule public ExpectedException thrown = ExpectedException.none();
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/DoFnSignaturesTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/DoFnSignaturesTest.java
index 1c6a3277991..61f2971bde9 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/DoFnSignaturesTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/DoFnSignaturesTest.java
@@ -94,7 +94,8 @@ import org.junit.runners.JUnit4;
 
 /** Tests for {@link DoFnSignatures}. */
 @RunWith(JUnit4.class)
-// TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of errorprone is
+// released (2.11.0)
 @SuppressWarnings("unused")
 public class DoFnSignaturesTest {
 
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/OnTimerInvokersTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/OnTimerInvokersTest.java
index beb1b86da16..fed422b444d 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/OnTimerInvokersTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/OnTimerInvokersTest.java
@@ -39,7 +39,8 @@ import org.mockito.MockitoAnnotations;
 
 /** Tests for {@link DoFnInvokers}. */
 @RunWith(JUnit4.class)
-// TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of errorprone is
+// released (2.11.0)
 @SuppressWarnings("unused")
 public class OnTimerInvokersTest {
   @Rule public ExpectedException thrown = ExpectedException.none();
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/testhelper/DoFnInvokersTestHelper.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/testhelper/DoFnInvokersTestHelper.java
index 404a6f7e03f..6b0a066ac3e 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/testhelper/DoFnInvokersTestHelper.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/testhelper/DoFnInvokersTestHelper.java
@@ -32,7 +32,8 @@ import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
  * Test helper for {@link DoFnInvokersTest}, which needs to test package-private access to DoFns in
  * other packages.
  */
-// TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of errorprone is
+// released (2.11.0)
 @SuppressWarnings("unused")
 public class DoFnInvokersTestHelper {
 
diff --git a/sdks/java/expansion-service/src/main/java/org/apache/beam/sdk/expansion/service/ExpansionService.java b/sdks/java/expansion-service/src/main/java/org/apache/beam/sdk/expansion/service/ExpansionService.java
index 8c4855a3842..d1acfbb54a7 100644
--- a/sdks/java/expansion-service/src/main/java/org/apache/beam/sdk/expansion/service/ExpansionService.java
+++ b/sdks/java/expansion-service/src/main/java/org/apache/beam/sdk/expansion/service/ExpansionService.java
@@ -465,7 +465,8 @@ public class ExpansionService extends ExpansionServiceGrpc.ExpansionServiceImplB
     if (!isUseDeprecatedRead) {
       ExperimentalOptions.addExperiment(
           pipeline.getOptions().as(ExperimentalOptions.class), "beam_fn_api");
-      // TODO(BEAM-10670): Remove this when we address performance issue.
+      // TODO(https://github.com/apache/beam/issues/20530): Remove this when we address performance
+      // issue.
       ExperimentalOptions.addExperiment(
           pipeline.getOptions().as(ExperimentalOptions.class), "use_sdf_read");
     } else {
@@ -568,7 +569,7 @@ public class ExpansionService extends ExpansionServiceGrpc.ExpansionServiceImplB
   }
 
   protected Pipeline createPipeline() {
-    // TODO: [BEAM-12599]: implement proper validation
+    // TODO: [https://github.com/apache/beam/issues/21064]: implement proper validation
     PipelineOptions effectiveOpts = PipelineOptionsFactory.create();
     PortablePipelineOptions portableOptions = effectiveOpts.as(PortablePipelineOptions.class);
     PortablePipelineOptions specifiedOptions = pipelineOptions.as(PortablePipelineOptions.class);
diff --git a/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/util/GcsUtil.java b/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/util/GcsUtil.java
index 9258ec655a9..a4938e2a89d 100644
--- a/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/util/GcsUtil.java
+++ b/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/util/GcsUtil.java
@@ -1041,7 +1041,8 @@ public class GcsUtil {
   }
 
   public void remove(Collection<String> filenames) throws IOException {
-    // TODO(BEAM-8268): It would be better to add per-file retries and backoff
+    // TODO(https://github.com/apache/beam/issues/19859): It would be better to add per-file retries
+    // and backoff
     // instead of failing everything if a single operation fails.
     executeBatches(makeRemoveBatches(filenames));
   }
diff --git a/sdks/java/extensions/ml/src/main/java/org/apache/beam/sdk/extensions/ml/BatchRequestForDLP.java b/sdks/java/extensions/ml/src/main/java/org/apache/beam/sdk/extensions/ml/BatchRequestForDLP.java
index b50f98b0646..821aeec3e0b 100644
--- a/sdks/java/extensions/ml/src/main/java/org/apache/beam/sdk/extensions/ml/BatchRequestForDLP.java
+++ b/sdks/java/extensions/ml/src/main/java/org/apache/beam/sdk/extensions/ml/BatchRequestForDLP.java
@@ -39,7 +39,8 @@ import org.slf4j.LoggerFactory;
 
 /** Batches input rows to reduce number of requests sent to Cloud DLP service. */
 @Experimental
-// TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of errorprone is
+// released (2.11.0)
 @SuppressWarnings("unused")
 class BatchRequestForDLP extends DoFn<KV<String, Table.Row>, KV<String, Iterable<Table.Row>>> {
   private static final Logger LOG = LoggerFactory.getLogger(BatchRequestForDLP.class);
diff --git a/sdks/java/extensions/python/src/main/java/org/apache/beam/sdk/extensions/python/PythonExternalTransform.java b/sdks/java/extensions/python/src/main/java/org/apache/beam/sdk/extensions/python/PythonExternalTransform.java
index c81a320c424..109da776322 100644
--- a/sdks/java/extensions/python/src/main/java/org/apache/beam/sdk/extensions/python/PythonExternalTransform.java
+++ b/sdks/java/extensions/python/src/main/java/org/apache/beam/sdk/extensions/python/PythonExternalTransform.java
@@ -81,7 +81,8 @@ public class PythonExternalTransform<InputT extends PInput, OutputT extends POut
     this.expansionService = expansionService;
     this.kwargsMap = new TreeMap<>();
     this.typeHints = new HashMap<>();
-    // TODO(BEAM-14458): remove a default type hint for PythonCallableSource when BEAM-14458 is
+    // TODO(https://github.com/apache/beam/issues/21567): remove a default type hint for
+    // PythonCallableSource when https://github.com/apache/beam/issues/21567 is
     // resolved
     this.typeHints.put(
         PythonCallableSource.class, Schema.FieldType.logicalType(new PythonCallable()));
diff --git a/sdks/java/extensions/python/src/test/java/org/apache/beam/sdk/extensions/python/PythonExternalTransformTest.java b/sdks/java/extensions/python/src/test/java/org/apache/beam/sdk/extensions/python/PythonExternalTransformTest.java
index 5d55a6e8d34..822708cfef3 100644
--- a/sdks/java/extensions/python/src/test/java/org/apache/beam/sdk/extensions/python/PythonExternalTransformTest.java
+++ b/sdks/java/extensions/python/src/test/java/org/apache/beam/sdk/extensions/python/PythonExternalTransformTest.java
@@ -45,7 +45,7 @@ import org.junit.runners.JUnit4;
 
 @RunWith(JUnit4.class)
 public class PythonExternalTransformTest implements Serializable {
-  @Ignore("BEAM-14148")
+  @Ignore("https://github.com/apache/beam/issues/21561")
   @Test
   public void trivialPythonTransform() {
     Pipeline p = Pipeline.create();
diff --git a/sdks/java/extensions/sbe/src/main/java/org/apache/beam/sdk/extensions/sbe/IrFieldGenerator.java b/sdks/java/extensions/sbe/src/main/java/org/apache/beam/sdk/extensions/sbe/IrFieldGenerator.java
index d578507c45e..5573d57ff01 100644
--- a/sdks/java/extensions/sbe/src/main/java/org/apache/beam/sdk/extensions/sbe/IrFieldGenerator.java
+++ b/sdks/java/extensions/sbe/src/main/java/org/apache/beam/sdk/extensions/sbe/IrFieldGenerator.java
@@ -54,7 +54,7 @@ final class IrFieldGenerator {
           fields.add(processPrimitive(iterator));
           break;
         default:
-          // TODO(BEAM-12697): Support remaining field types
+          // TODO(https://github.com/apache/beam/issues/21102): Support remaining field types
           break;
       }
     }
diff --git a/sdks/java/extensions/sbe/src/main/java/org/apache/beam/sdk/extensions/sbe/PrimitiveSbeField.java b/sdks/java/extensions/sbe/src/main/java/org/apache/beam/sdk/extensions/sbe/PrimitiveSbeField.java
index b69c77df5d4..e460831a903 100644
--- a/sdks/java/extensions/sbe/src/main/java/org/apache/beam/sdk/extensions/sbe/PrimitiveSbeField.java
+++ b/sdks/java/extensions/sbe/src/main/java/org/apache/beam/sdk/extensions/sbe/PrimitiveSbeField.java
@@ -48,8 +48,9 @@ abstract class PrimitiveSbeField implements SbeField {
   private FieldType beamType(SbeFieldOptions options) {
     switch (type()) {
       case CHAR:
-        // TODO(BEAM-12697): Support char types
-        throw new UnsupportedOperationException("char type not supported yet (BEAM-12697)");
+        // TODO(https://github.com/apache/beam/issues/21102): Support char types
+        throw new UnsupportedOperationException(
+            "char type not supported yet (https://github.com/apache/beam/issues/21102)");
       case INT8:
         return FieldType.BYTE;
       case INT16:
diff --git a/sdks/java/extensions/sql/build.gradle b/sdks/java/extensions/sql/build.gradle
index 9e6756b5adf..a99ce3a800d 100644
--- a/sdks/java/extensions/sql/build.gradle
+++ b/sdks/java/extensions/sql/build.gradle
@@ -29,7 +29,7 @@ applyJavaNature(
   ],
   automaticModuleName: 'org.apache.beam.sdk.extensions.sql',
   classesTriggerCheckerBugs: [
-    // TODO(BEAM-12687): This currently crashes with checkerframework 3.10.0
+    // TODO(https://github.com/apache/beam/issues/21068): This currently crashes with checkerframework 3.10.0
     // when compiling :sdks:java:extensions:sql:compileJava with:
     // message: class file for com.google.datastore.v1.Entity not found
     // ; The Checker Framework crashed.  Please report the crash.
@@ -37,7 +37,7 @@ applyJavaNature(
     // Last visited tree at line 49 column 1:
     // @AutoService(TableProvider.class)
     // Exception: com.sun.tools.javac.code.Symbol$CompletionFailure: class file for com.google.datastore.v1.Entity not found; com.sun.tools.javac.code.Symbol$CompletionFailure: class file for com.google.datastore.v1.Entity not found
-    'DataStoreV1TableProvider': 'TODO(BEAM-12687): Report the crash if still occurring on newest version',
+    'DataStoreV1TableProvider': 'TODO(https://github.com/apache/beam/issues/21068): Report the crash if still occurring on newest version',
   ],
   // javacc generated code produces lint warnings
   disableLintWarnings: ['dep-ann', 'rawtypes'],
@@ -66,7 +66,7 @@ hadoopVersions.each {kv -> configurations.create("hadoopVersion$kv.key")}
 dependencies {
   implementation enforcedPlatform(library.java.google_cloud_platform_libraries_bom)
 
-  // TODO(BEAM-13179): Determine how to build without this dependency
+  // TODO(https://github.com/apache/beam/issues/21156): Determine how to build without this dependency
   provided "org.immutables:value:2.8.8"
   permitUnusedDeclared "org.immutables:value:2.8.8"
   javacc "net.java.dev.javacc:javacc:4.0"
diff --git a/sdks/java/extensions/sql/hcatalog/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/hcatalog/BeamSqlHiveSchemaTest.java b/sdks/java/extensions/sql/hcatalog/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/hcatalog/BeamSqlHiveSchemaTest.java
index 352c316722d..f62973cc6c7 100644
--- a/sdks/java/extensions/sql/hcatalog/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/hcatalog/BeamSqlHiveSchemaTest.java
+++ b/sdks/java/extensions/sql/hcatalog/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/hcatalog/BeamSqlHiveSchemaTest.java
@@ -70,7 +70,8 @@ public class BeamSqlHiveSchemaTest implements Serializable {
 
   @BeforeClass
   public static void setupEmbeddedMetastoreService() throws IOException {
-    // TODO(BEAM-13506): Remove this when hive version 4 is released and includes
+    // TODO(https://github.com/apache/beam/issues/21299): Remove this when hive version 4 is
+    // released and includes
     // https://github.com/apache/hive/commit/a234475faa2cab2606f2a74eb9ca071f006998e2
     assumeFalse(SystemUtils.isJavaVersionAtLeast(1.9f));
     service = new EmbeddedMetastoreService(TMP_FOLDER.getRoot().getAbsolutePath());
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/ScalarFunctionImpl.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/ScalarFunctionImpl.java
index 3433a86949f..3249498d409 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/ScalarFunctionImpl.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/ScalarFunctionImpl.java
@@ -164,7 +164,8 @@ public class ScalarFunctionImpl extends UdfImplReflectiveFunctionBase
 
       final List<Expression> translated = new ArrayList<>();
       for (int i = 0; i < expressions.size(); i++) {
-        // TODO: [BEAM-8255] Add support for user defined function with var-arg
+        // TODO: [https://github.com/apache/beam/issues/19825] Add support for user defined function
+        // with var-arg
         // Ex: types: [String[].class], expression: [param1, param2, ...]
         translated.add(translate(types.get(i), expressions.get(i)));
       }
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/parser/SqlCreateFunction.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/parser/SqlCreateFunction.java
index 06ac8c5a1cb..015a87e3374 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/parser/SqlCreateFunction.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/parser/SqlCreateFunction.java
@@ -100,7 +100,7 @@ public class SqlCreateFunction extends SqlCreate implements BeamSqlParser.Execut
           RESOURCE.internal(String.format("Function %s is already defined.", lastName)));
     }
     JavaUdfLoader udfLoader = new JavaUdfLoader();
-    // TODO(BEAM-12355) Support qualified function names.
+    // TODO(https://github.com/apache/beam/issues/20834) Support qualified function names.
     List<String> functionPath = ImmutableList.of(lastName);
     if (!(jarPath instanceof SqlCharStringLiteral)) {
       throw SqlUtil.newContextException(
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamCalcRel.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamCalcRel.java
index 44c16a6f9db..804715f4362 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamCalcRel.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamCalcRel.java
@@ -113,7 +113,9 @@ import org.slf4j.LoggerFactory;
   "rawtypes", // TODO(https://github.com/apache/beam/issues/20447)
   "keyfor",
   "nullness",
-  "unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+  // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of
+  // errorprone is released (2.11.0)
+  "unused"
 }) // TODO(https://github.com/apache/beam/issues/20497)
 public class BeamCalcRel extends AbstractBeamCalcRel {
 
@@ -351,7 +353,7 @@ public class BeamCalcRel extends AbstractBeamCalcRel {
       return null;
     }
     switch (fieldType.getTypeName()) {
-        // BEAM-12176: Numbers aren't always the type we expect.
+        // https://github.com/apache/beam/issues/20864: Numbers aren't always the type we expect.
       case BYTE:
         return ((Number) value).byteValue();
       case INT16:
@@ -410,13 +412,15 @@ public class BeamCalcRel extends AbstractBeamCalcRel {
           if (value instanceof Date) {
             value = SqlFunctions.toInt((Date) value);
           }
-          // BEAM-12175: value should always be Integer here, but it isn't.
+          // https://github.com/apache/beam/issues/20865: value should always be Integer here, but
+          // it isn't.
           return LocalDate.ofEpochDay(((Number) value).longValue());
         } else if (SqlTypes.TIME.getIdentifier().equals(identifier)) {
           if (value instanceof Time) {
             value = SqlFunctions.toInt((Time) value);
           }
-          // BEAM-12175: value should always be Integer here, but it isn't.
+          // https://github.com/apache/beam/issues/20865: value should always be Integer here, but
+          // it isn't.
           return LocalTime.ofNanoOfDay(((Number) value).longValue() * NANOS_PER_MILLISECOND);
         } else if (SqlTypes.DATETIME.getIdentifier().equals(identifier)) {
           if (value instanceof Timestamp) {
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSortRel.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSortRel.java
index 13be5f4cec7..f316c68f27e 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSortRel.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSortRel.java
@@ -95,7 +95,9 @@ import org.checkerframework.checker.nullness.qual.Nullable;
 @SuppressWarnings({
   "rawtypes", // TODO(https://github.com/apache/beam/issues/20447)
   "nullness", // TODO(https://github.com/apache/beam/issues/20497)
-  "unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+  // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of
+  // errorprone is released (2.11.0)
+  "unused"
 })
 public class BeamSortRel extends Sort implements BeamRelNode {
   private List<Integer> fieldIndices = new ArrayList<>();
@@ -190,7 +192,7 @@ public class BeamSortRel extends Sort implements BeamRelNode {
       //    works only on bounded data.
       //  - Just LIMIT operates on unbounded data, but across windows.
       if (fieldIndices.isEmpty()) {
-        // TODO(https://issues.apache.org/jira/projects/BEAM/issues/BEAM-4702)
+        // TODO(https://github.com/apache/beam/issues/19075)
         // Figure out which operations are per-window and which are not.
 
         return upstream
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/SchemaIOTableProviderWrapper.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/SchemaIOTableProviderWrapper.java
index c6dbc302b9c..90dea3546dc 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/SchemaIOTableProviderWrapper.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/SchemaIOTableProviderWrapper.java
@@ -133,7 +133,7 @@ public abstract class SchemaIOTableProviderWrapper extends InMemoryMetaTableProv
       if (!(filters instanceof DefaultTableFilter)) {
         throw new UnsupportedOperationException(
             String.format(
-                "Filter pushdown is not yet supported in %s. BEAM-12663",
+                "Filter pushdown is not yet supported in %s. https://github.com/apache/beam/issues/21001",
                 SchemaIOTableWrapper.class));
       }
       if (!fieldNames.isEmpty()) {
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/avro/AvroTableProvider.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/avro/AvroTableProvider.java
index f4c04c41090..4df6a2b2795 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/avro/AvroTableProvider.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/avro/AvroTableProvider.java
@@ -49,7 +49,8 @@ public class AvroTableProvider extends SchemaIOTableProviderWrapper {
     return new AvroSchemaIOProvider();
   }
 
-  // TODO[BEAM-10516]: remove this override after TableProvider problem is fixed
+  // TODO[https://github.com/apache/beam/issues/20430]: remove this override after the TableProvider
+  // problem is fixed
   @Override
   public String getTableType() {
     return "avro";
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/bigquery/BeamSqlUnparseContext.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/bigquery/BeamSqlUnparseContext.java
index 7c28d4ba41d..ff9b4e69405 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/bigquery/BeamSqlUnparseContext.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/bigquery/BeamSqlUnparseContext.java
@@ -79,7 +79,7 @@ public class BeamSqlUnparseContext extends SqlImplementor.SimpleContext {
               })
           // \b, \n, \t, \f, \r
           .with(new LookupTranslator(EntityArrays.JAVA_CTRL_CHARS_ESCAPE()))
-          // TODO(BEAM-9180): Add support for \Uhhhhhhhh
+          // TODO(https://github.com/apache/beam/issues/19981): Add support for \Uhhhhhhhh
           // Unicode (only 4 hex digits)
           .with(JavaUnicodeEscaper.outsideOf(32, 0x7f));
 
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/mongodb/MongoDbTable.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/mongodb/MongoDbTable.java
index 1803d1f7132..3366d9ad988 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/mongodb/MongoDbTable.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/mongodb/MongoDbTable.java
@@ -320,7 +320,8 @@ public class MongoDbTable extends SchemaBaseBeamTable implements Serializable {
 
     @Override
     public PCollection<Row> expand(PCollection<Document> input) {
-      // TODO(BEAM-8498): figure out a way convert Document directly to Row.
+      // TODO(https://github.com/apache/beam/issues/19845): figure out a way to convert Document
+      // directly to Row.
       return input
           .apply("Convert Document to JSON", ParDo.of(new DocumentToJsonStringConverter()))
           .apply("Transform JSON to Row", JsonToRow.withSchema(schema))
@@ -352,7 +353,8 @@ public class MongoDbTable extends SchemaBaseBeamTable implements Serializable {
     @Override
     public PCollection<Document> expand(PCollection<Row> input) {
       return input
-          // TODO(BEAM-8498): figure out a way convert Row directly to Document.
+          // TODO(https://github.com/apache/beam/issues/19845): figure out a way to convert Row
+          // directly to Document.
           .apply("Transform Rows to JSON", ToJson.of())
           .apply("Produce documents from JSON", MapElements.via(new ObjectToDocumentFn()));
     }
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/pubsub/PubsubTableProvider.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/pubsub/PubsubTableProvider.java
index 83231f6118b..cf9f7542ef8 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/pubsub/PubsubTableProvider.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/pubsub/PubsubTableProvider.java
@@ -41,7 +41,8 @@ public class PubsubTableProvider extends SchemaIOTableProviderWrapper {
     return new PubsubSchemaIOProvider();
   }
 
-  // TODO[BEAM-10516]: remove this override after TableProvider problem is fixed
+  // TODO[https://github.com/apache/beam/issues/20430]: remove this override after the TableProvider
+  // problem is fixed
   @Override
   public String getTableType() {
     return "pubsub";
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlExplainTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlExplainTest.java
index 2180bd83150..06e30936b4c 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlExplainTest.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlExplainTest.java
@@ -61,7 +61,7 @@ public class BeamSqlExplainTest {
             + "COMMENT '' ");
   }
 
-  // TODO: (BEAM-4561) 5/30/2017 The test here is too fragile.
+  // TODO: (https://github.com/apache/beam/issues/19059) 5/30/2017 The test here is too fragile.
   @Ignore
   public void testExplainCommaJoin() {
     String plan = cli.explainQuery("SELECT A.c1, B.c2 FROM A, B WHERE A.c1 = B.c2 AND A.c1 > 0");
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/integrationtest/BeamSqlDateFunctionsIntegrationTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/integrationtest/BeamSqlDateFunctionsIntegrationTest.java
index dc6313d28c0..d612c9d1914 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/integrationtest/BeamSqlDateFunctionsIntegrationTest.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/integrationtest/BeamSqlDateFunctionsIntegrationTest.java
@@ -39,7 +39,7 @@ public class BeamSqlDateFunctionsIntegrationTest
     extends BeamSqlBuiltinFunctionsIntegrationTestBase {
 
   @Test
-  @Ignore("https://jira.apache.org/jira/browse/BEAM-10328")
+  @Ignore("https://github.com/apache/beam/issues/20339")
   public void testDateTimeFunctions_currentTime() throws Exception {
     String sql =
         "SELECT "
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/kafka/KafkaTableProviderIT.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/kafka/KafkaTableProviderIT.java
index bcc84ecb7eb..21f0d4fb3e1 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/kafka/KafkaTableProviderIT.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/kafka/KafkaTableProviderIT.java
@@ -90,7 +90,8 @@ import org.testcontainers.utility.DockerImageName;
 
 /** Integration Test utility for KafkaTableProvider implementations. */
 @RunWith(Parameterized.class)
-// TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of errorprone is
+// released (2.11.0)
 @SuppressWarnings("unused")
 public class KafkaTableProviderIT {
   private static final String KAFKA_CONTAINER_VERSION = "5.5.2";
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/pubsub/PubsubTableProviderIT.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/pubsub/PubsubTableProviderIT.java
index 87b5cc4e2ef..f8d8ff3098a 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/pubsub/PubsubTableProviderIT.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/pubsub/PubsubTableProviderIT.java
@@ -586,8 +586,9 @@ public class PubsubTableProviderIT implements Serializable {
     BeamSqlEnv sqlEnv = BeamSqlEnv.inMemory(new PubsubTableProvider());
     sqlEnv.executeDdl(createTableString);
 
-    // TODO(BEAM-8741): Ideally we could write this query without specifying a column list, because
-    //   it shouldn't be possible to write to event_timestamp when it's mapped to  publish time.
+    // TODO(https://github.com/apache/beam/issues/19875): Ideally we could write this query without
+    //   specifying a column list, because it shouldn't be possible to write to event_timestamp
+    //   when it's mapped to publish time.
     String queryString =
         "INSERT INTO message (name, height, knows_javascript) \n"
             + "VALUES \n"
@@ -705,9 +706,9 @@ public class PubsubTableProviderIT implements Serializable {
     sqlEnv.executeDdl(createTableString);
     sqlEnv.executeDdl(createFilteredTableString);
 
-    // TODO(BEAM-8741): Ideally we could write these queries without specifying a column list,
-    // because
-    //   it shouldn't be possible to write to event_timestamp when it's mapped to  publish time.
+    // TODO(https://github.com/apache/beam/issues/19875): Ideally we could write these queries
+    //   without specifying a column list, because it shouldn't be possible to write to
+    //   event_timestamp when it's mapped to publish time.
     String filterQueryString =
         "INSERT INTO javascript_people (name, height) (\n"
             + "  SELECT \n"
diff --git a/sdks/java/extensions/sql/zetasql/build.gradle b/sdks/java/extensions/sql/zetasql/build.gradle
index 4e544d81f43..bb8ee55330f 100644
--- a/sdks/java/extensions/sql/zetasql/build.gradle
+++ b/sdks/java/extensions/sql/zetasql/build.gradle
@@ -30,7 +30,7 @@ ext.summary = "ZetaSQL to Calcite translator"
 def zetasql_version = "2022.04.1"
 
 dependencies {
-  // TODO(BEAM-13179): Determine how to build without this dependency
+  // TODO(https://github.com/apache/beam/issues/21156): Determine how to build without this dependency
   provided "org.immutables:value:2.8.8"
   permitUnusedDeclared "org.immutables:value:2.8.8"
   implementation enforcedPlatform(library.java.google_cloud_platform_libraries_bom)
diff --git a/sdks/java/extensions/sql/zetasql/src/main/java/org/apache/beam/sdk/extensions/sql/zetasql/BeamZetaSqlCalcRel.java b/sdks/java/extensions/sql/zetasql/src/main/java/org/apache/beam/sdk/extensions/sql/zetasql/BeamZetaSqlCalcRel.java
index 8cc0afb54c3..744fbd0bcd4 100644
--- a/sdks/java/extensions/sql/zetasql/src/main/java/org/apache/beam/sdk/extensions/sql/zetasql/BeamZetaSqlCalcRel.java
+++ b/sdks/java/extensions/sql/zetasql/src/main/java/org/apache/beam/sdk/extensions/sql/zetasql/BeamZetaSqlCalcRel.java
@@ -74,7 +74,8 @@ import org.joda.time.Instant;
  * expression evaluator.
  */
 @SuppressWarnings(
-    "unused") // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+    "unused") // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of
+// errorprone is released (2.11.0)
 @Internal
 public class BeamZetaSqlCalcRel extends AbstractBeamCalcRel {
 
@@ -280,8 +281,8 @@ public class BeamZetaSqlCalcRel extends AbstractBeamCalcRel {
       }
     }
 
-    // TODO(BEAM-1287): Remove this when FinishBundle has added support for an {@link
-    // OutputReceiver}
+    // TODO(https://github.com/apache/beam/issues/18203): Remove this when FinishBundle has added
+    // support for an {@link OutputReceiver}
     private static class OutputReceiverForFinishBundle implements OutputReceiver<Row> {
 
       private final FinishBundleContext c;
diff --git a/sdks/java/extensions/sql/zetasql/src/main/java/org/apache/beam/sdk/extensions/sql/zetasql/BeamZetaSqlCatalog.java b/sdks/java/extensions/sql/zetasql/src/main/java/org/apache/beam/sdk/extensions/sql/zetasql/BeamZetaSqlCatalog.java
index d405d071186..985561b5a3a 100644
--- a/sdks/java/extensions/sql/zetasql/src/main/java/org/apache/beam/sdk/extensions/sql/zetasql/BeamZetaSqlCatalog.java
+++ b/sdks/java/extensions/sql/zetasql/src/main/java/org/apache/beam/sdk/extensions/sql/zetasql/BeamZetaSqlCatalog.java
@@ -357,7 +357,7 @@ public class BeamZetaSqlCatalog {
         throw new IllegalArgumentException(
             String.format(
                 "Expected exactly 1 definition for function '%s', but found %d."
-                    + " Beam ZetaSQL supports only a single function definition per function name (BEAM-12073).",
+                    + " Beam ZetaSQL supports only a single function definition per function name (https://github.com/apache/beam/issues/20828).",
                 functionName, functions.size()));
       }
       for (org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.schema.Function function :
@@ -465,7 +465,7 @@ public class BeamZetaSqlCatalog {
       case "SQL":
         if (createFunctionStmt.getIsAggregate()) {
           throw new UnsupportedOperationException(
-              "Native SQL aggregate functions are not supported (BEAM-9954).");
+              "Native SQL aggregate functions are not supported (https://github.com/apache/beam/issues/20193).");
         }
         return USER_DEFINED_SQL_FUNCTIONS;
       case "PY":
diff --git a/sdks/java/extensions/sql/zetasql/src/main/java/org/apache/beam/sdk/extensions/sql/zetasql/ZetaSQLQueryPlanner.java b/sdks/java/extensions/sql/zetasql/src/main/java/org/apache/beam/sdk/extensions/sql/zetasql/ZetaSQLQueryPlanner.java
index c53ca4d39a4..ecd8af1fe16 100644
--- a/sdks/java/extensions/sql/zetasql/src/main/java/org/apache/beam/sdk/extensions/sql/zetasql/ZetaSQLQueryPlanner.java
+++ b/sdks/java/extensions/sql/zetasql/src/main/java/org/apache/beam/sdk/extensions/sql/zetasql/ZetaSQLQueryPlanner.java
@@ -127,7 +127,8 @@ public class ZetaSQLQueryPlanner implements QueryPlanner {
     for (RuleSet ruleSet : ruleSets) {
       ImmutableList.Builder<RelOptRule> bd = ImmutableList.builder();
       for (RelOptRule rule : ruleSet) {
-        // TODO[BEAM-9075]: Fix join re-ordering for ZetaSQL planner. Currently join re-ordering
+        // TODO[https://github.com/apache/beam/issues/20077]: Fix join re-ordering for ZetaSQL
+        // planner. Currently join re-ordering
         //  requires the JoinCommuteRule, which doesn't work without struct flattening.
         if (rule instanceof JoinCommuteRule) {
           continue;
diff --git a/sdks/java/extensions/sql/zetasql/src/main/java/org/apache/beam/sdk/extensions/sql/zetasql/ZetaSqlBeamTranslationUtils.java b/sdks/java/extensions/sql/zetasql/src/main/java/org/apache/beam/sdk/extensions/sql/zetasql/ZetaSqlBeamTranslationUtils.java
index c1dab08fa52..21177590833 100644
--- a/sdks/java/extensions/sql/zetasql/src/main/java/org/apache/beam/sdk/extensions/sql/zetasql/ZetaSqlBeamTranslationUtils.java
+++ b/sdks/java/extensions/sql/zetasql/src/main/java/org/apache/beam/sdk/extensions/sql/zetasql/ZetaSqlBeamTranslationUtils.java
@@ -74,7 +74,8 @@ public final class ZetaSqlBeamTranslationUtils {
       case DECIMAL:
         return TypeFactory.createSimpleType(TypeKind.TYPE_NUMERIC);
       case DATETIME:
-        // TODO[BEAM-10238]: Mapping TIMESTAMP to a Beam LogicalType instead?
+        // TODO[https://github.com/apache/beam/issues/20364]: Mapping TIMESTAMP to a Beam
+        // LogicalType instead?
         return TypeFactory.createSimpleType(TypeKind.TYPE_TIMESTAMP);
       case LOGICAL_TYPE:
         String identifier = fieldType.getLogicalType().getIdentifier();
diff --git a/sdks/java/extensions/sql/zetasql/src/main/java/org/apache/beam/sdk/extensions/sql/zetasql/ZetaSqlCalciteTranslationUtils.java b/sdks/java/extensions/sql/zetasql/src/main/java/org/apache/beam/sdk/extensions/sql/zetasql/ZetaSqlCalciteTranslationUtils.java
index b03b1d85043..5c4b1c266e0 100644
--- a/sdks/java/extensions/sql/zetasql/src/main/java/org/apache/beam/sdk/extensions/sql/zetasql/ZetaSqlCalciteTranslationUtils.java
+++ b/sdks/java/extensions/sql/zetasql/src/main/java/org/apache/beam/sdk/extensions/sql/zetasql/ZetaSqlCalciteTranslationUtils.java
@@ -235,7 +235,8 @@ public final class ZetaSqlCalciteTranslationUtils {
         if (wrapperFun == null) {
           return rexBuilder.makeApproxLiteral(new BigDecimal(val), returnType);
         } else if (BeamBigQuerySqlDialect.DOUBLE_NAN_WRAPPER.equals(wrapperFun)) {
-          // TODO[BEAM-10550]: Update the temporary workaround below after vendored Calcite version.
+          // TODO[https://github.com/apache/beam/issues/20354]: Update the temporary workaround
+          // below after vendored Calcite version.
           // Adding an additional random parameter for the wrapper function of NaN, to avoid
           // triggering Calcite operation simplification. (e.g. 'NaN == NaN' would be simplified to
           // 'null or NaN is not null' in Calcite. This would miscalculate the expression to be
diff --git a/sdks/java/extensions/sql/zetasql/src/main/java/org/apache/beam/sdk/extensions/sql/zetasql/translation/SqlOperators.java b/sdks/java/extensions/sql/zetasql/src/main/java/org/apache/beam/sdk/extensions/sql/zetasql/translation/SqlOperators.java
index 824c7ef10e4..59d54063d96 100644
--- a/sdks/java/extensions/sql/zetasql/src/main/java/org/apache/beam/sdk/extensions/sql/zetasql/translation/SqlOperators.java
+++ b/sdks/java/extensions/sql/zetasql/src/main/java/org/apache/beam/sdk/extensions/sql/zetasql/translation/SqlOperators.java
@@ -190,7 +190,7 @@ public class SqlOperators {
       if (resolvedExpr instanceof ResolvedNodes.ResolvedLiteral) {
         delimiter = ((ResolvedNodes.ResolvedLiteral) resolvedExpr).getValue();
       } else {
-        // TODO(BEAM-13673) Add support for params
+        // TODO(https://github.com/apache/beam/issues/21283) Add support for params
         throw new ZetaSqlException(
             new StatusRuntimeException(
                 Status.INVALID_ARGUMENT.withDescription(
diff --git a/sdks/java/extensions/sql/zetasql/src/test/java/org/apache/beam/sdk/extensions/sql/zetasql/StreamingSqlTest.java b/sdks/java/extensions/sql/zetasql/src/test/java/org/apache/beam/sdk/extensions/sql/zetasql/StreamingSqlTest.java
index 4f985ee9afd..4fd3b35724f 100644
--- a/sdks/java/extensions/sql/zetasql/src/test/java/org/apache/beam/sdk/extensions/sql/zetasql/StreamingSqlTest.java
+++ b/sdks/java/extensions/sql/zetasql/src/test/java/org/apache/beam/sdk/extensions/sql/zetasql/StreamingSqlTest.java
@@ -529,7 +529,8 @@ public class StreamingSqlTest extends ZetaSqlTestBase {
   }
 
   @Test
-  @Ignore("[BEAM-9191] CAST operator does not work fully due to bugs in unparsing")
+  @Ignore(
+      "[https://github.com/apache/beam/issues/20101] CAST operator does not work fully due to bugs in unparsing")
   public void testZetaSQLStructFieldAccessInTumble() {
     String sql =
         "SELECT TUMBLE_START('INTERVAL 1 MINUTE') FROM table_with_struct_ts_string AS A GROUP BY "
diff --git a/sdks/java/extensions/sql/zetasql/src/test/java/org/apache/beam/sdk/extensions/sql/zetasql/ZetaSqlDialectSpecTest.java b/sdks/java/extensions/sql/zetasql/src/test/java/org/apache/beam/sdk/extensions/sql/zetasql/ZetaSqlDialectSpecTest.java
index eaba01ba782..ec5ea89f97a 100644
--- a/sdks/java/extensions/sql/zetasql/src/test/java/org/apache/beam/sdk/extensions/sql/zetasql/ZetaSqlDialectSpecTest.java
+++ b/sdks/java/extensions/sql/zetasql/src/test/java/org/apache/beam/sdk/extensions/sql/zetasql/ZetaSqlDialectSpecTest.java
@@ -1346,7 +1346,8 @@ public class ZetaSqlDialectSpecTest extends ZetaSqlTestBase {
   }
 
   @Test
-  @Ignore("[BEAM-9191] CAST operator does not work fully due to bugs in unparsing")
+  @Ignore(
+      "[https://github.com/apache/beam/issues/20101] CAST operator does not work fully due to bugs in unparsing")
   public void testZetaSQLStructFieldAccessInCast2() {
     String sql =
         "SELECT CAST(A.struct_col.struct_col_str AS TIMESTAMP) FROM table_with_struct_ts_string AS"
@@ -2293,7 +2294,8 @@ public class ZetaSqlDialectSpecTest extends ZetaSqlTestBase {
   }
 
   @Test
-  @Ignore("[BEAM-9515] ArrayScanToUncollectConverter Unnest does not support sub-queries")
+  @Ignore(
+      "[https://github.com/apache/beam/issues/20139] ArrayScanToUncollectConverter Unnest does not support sub-queries")
   public void testUNNESTExpression() {
     String sql = "SELECT * FROM UNNEST(ARRAY(SELECT Value FROM KeyValue));";
     ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
@@ -3411,7 +3413,7 @@ public class ZetaSqlDialectSpecTest extends ZetaSqlTestBase {
   }
 
   @Test
-  @Ignore("https://jira.apache.org/jira/browse/BEAM-9191")
+  @Ignore("https://github.com/apache/beam/issues/20101")
   public void testCastBytesToString1() {
     String sql = "SELECT CAST(@p0 AS STRING)";
     ImmutableMap<String, Value> params =
@@ -3436,7 +3438,7 @@ public class ZetaSqlDialectSpecTest extends ZetaSqlTestBase {
   }
 
   @Test
-  @Ignore("https://jira.apache.org/jira/browse/BEAM-9191")
+  @Ignore("https://github.com/apache/beam/issues/20101")
   public void testCastBytesToStringFromTable() {
     String sql = "SELECT CAST(bytes_col AS STRING) FROM table_all_types";
     PCollection<Row> stream = execute(sql);
@@ -3492,7 +3494,7 @@ public class ZetaSqlDialectSpecTest extends ZetaSqlTestBase {
   }
 
   @Test
-  @Ignore("https://jira.apache.org/jira/browse/BEAM-10340")
+  @Ignore("https://github.com/apache/beam/issues/20351")
   public void testCastBetweenTimeAndString() {
     String sql =
         "SELECT CAST(s1 as TIME) as t2, CAST(t1 as STRING) as s2 FROM "
@@ -3548,7 +3550,7 @@ public class ZetaSqlDialectSpecTest extends ZetaSqlTestBase {
   }
 
   @Test
-  @Ignore("[BEAM-8593] ZetaSQL does not support Map type")
+  @Ignore("[https://github.com/apache/beam/issues/19963] ZetaSQL does not support Map type")
   public void testSelectFromTableWithMap() {
     String sql = "SELECT row_field FROM table_with_map";
     PCollection<Row> stream = execute(sql);
diff --git a/sdks/java/extensions/sql/zetasql/src/test/java/org/apache/beam/sdk/extensions/sql/zetasql/ZetaSqlJavaUdfTest.java b/sdks/java/extensions/sql/zetasql/src/test/java/org/apache/beam/sdk/extensions/sql/zetasql/ZetaSqlJavaUdfTest.java
index da5bb339b42..7f64acbeeec 100644
--- a/sdks/java/extensions/sql/zetasql/src/test/java/org/apache/beam/sdk/extensions/sql/zetasql/ZetaSqlJavaUdfTest.java
+++ b/sdks/java/extensions/sql/zetasql/src/test/java/org/apache/beam/sdk/extensions/sql/zetasql/ZetaSqlJavaUdfTest.java
@@ -259,7 +259,8 @@ public class ZetaSqlJavaUdfTest extends ZetaSqlTestBase {
             jarPath);
     ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
     BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
-    // TODO(BEAM-11171) This should fail earlier, before compiling the CalcFn.
+    // TODO(https://github.com/apache/beam/issues/20614) This should fail earlier, before compiling
+    // the CalcFn.
     thrown.expect(UnsupportedOperationException.class);
     thrown.expectMessage("Could not compile CalcFn");
     thrown.expectCause(
@@ -283,7 +284,8 @@ public class ZetaSqlJavaUdfTest extends ZetaSqlTestBase {
             jarPath);
     ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
     BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
-    // TODO(BEAM-11171) This should fail earlier, before compiling the CalcFn.
+    // TODO(https://github.com/apache/beam/issues/20614) This should fail earlier, before compiling
+    // the CalcFn.
     thrown.expect(UnsupportedOperationException.class);
     thrown.expectMessage("Could not compile CalcFn");
     thrown.expectCause(
diff --git a/sdks/java/extensions/sql/zetasql/src/test/java/org/apache/beam/sdk/extensions/sql/zetasql/ZetaSqlNativeUdfTest.java b/sdks/java/extensions/sql/zetasql/src/test/java/org/apache/beam/sdk/extensions/sql/zetasql/ZetaSqlNativeUdfTest.java
index de7e8561c01..9f37472cfae 100644
--- a/sdks/java/extensions/sql/zetasql/src/test/java/org/apache/beam/sdk/extensions/sql/zetasql/ZetaSqlNativeUdfTest.java
+++ b/sdks/java/extensions/sql/zetasql/src/test/java/org/apache/beam/sdk/extensions/sql/zetasql/ZetaSqlNativeUdfTest.java
@@ -257,7 +257,8 @@ public class ZetaSqlNativeUdfTest extends ZetaSqlTestBase {
 
     ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
     thrown.expect(UnsupportedOperationException.class);
-    thrown.expectMessage("Native SQL aggregate functions are not supported (BEAM-9954).");
+    thrown.expectMessage(
+        "Native SQL aggregate functions are not supported (https://github.com/apache/beam/issues/20193).");
     zetaSQLQueryPlanner.convertToBeamRel(sql);
   }
 }
diff --git a/sdks/java/extensions/sql/zetasql/src/test/java/org/apache/beam/sdk/extensions/sql/zetasql/ZetaSqlTimeFunctionsTest.java b/sdks/java/extensions/sql/zetasql/src/test/java/org/apache/beam/sdk/extensions/sql/zetasql/ZetaSqlTimeFunctionsTest.java
index 3e3ec8d5d71..dcd60ea76dd 100644
--- a/sdks/java/extensions/sql/zetasql/src/test/java/org/apache/beam/sdk/extensions/sql/zetasql/ZetaSqlTimeFunctionsTest.java
+++ b/sdks/java/extensions/sql/zetasql/src/test/java/org/apache/beam/sdk/extensions/sql/zetasql/ZetaSqlTimeFunctionsTest.java
@@ -137,7 +137,8 @@ public class ZetaSqlTimeFunctionsTest extends ZetaSqlTestBase {
     pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
   }
 
-  // TODO[BEAM-9166]: Add a test for CURRENT_DATE function ("SELECT CURRENT_DATE()")
+  // TODO[https://github.com/apache/beam/issues/19980]: Add a test for CURRENT_DATE function
+  // ("SELECT CURRENT_DATE()")
 
   @Test
   public void testExtractFromDate() {
@@ -150,7 +151,8 @@ public class ZetaSqlTimeFunctionsTest extends ZetaSqlTestBase {
             + "  EXTRACT(ISOYEAR FROM date) AS isoyear,\n"
             + "  EXTRACT(YEAR FROM date) AS year,\n"
             + "  EXTRACT(ISOWEEK FROM date) AS isoweek,\n"
-            // TODO[BEAM-10606]: Add tests for DATE_TRUNC and EXTRACT with "week with weekday" date
+            // TODO[https://github.com/apache/beam/issues/20338]: Add tests for DATE_TRUNC and
+            // EXTRACT with "week with weekday" date
             //  parts once they are supported
             // + "  EXTRACT(WEEK FROM date) AS week,\n"
             + "  EXTRACT(MONTH FROM date) AS month,\n"
@@ -537,7 +539,8 @@ public class ZetaSqlTimeFunctionsTest extends ZetaSqlTestBase {
     pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
   }
 
-  // TODO[BEAM-9166]: Add a test for CURRENT_TIME function ("SELECT CURRENT_TIME()")
+  // TODO[https://github.com/apache/beam/issues/19980]: Add a test for CURRENT_TIME function
+  // ("SELECT CURRENT_TIME()")
 
   @Test
   public void testExtractFromTime() {
@@ -867,7 +870,8 @@ public class ZetaSqlTimeFunctionsTest extends ZetaSqlTestBase {
     pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
   }
 
-  // TODO[BEAM-9166]: Add a test for CURRENT_DATETIME function ("SELECT CURRENT_DATETIME()")
+  // TODO[https://github.com/apache/beam/issues/19980]: Add a test for CURRENT_DATETIME function
+  // ("SELECT CURRENT_DATETIME()")
 
   @Test
   public void testExtractFromDateTime() {
@@ -876,7 +880,8 @@ public class ZetaSqlTimeFunctionsTest extends ZetaSqlTestBase {
             + "EXTRACT(YEAR FROM DATETIME '2008-12-25 15:30:00') as year, "
             + "EXTRACT(QUARTER FROM DATETIME '2008-12-25 15:30:00') as quarter, "
             + "EXTRACT(MONTH FROM DATETIME '2008-12-25 15:30:00') as month, "
-            // TODO[BEAM-10606]: Add tests for DATETIME_TRUNC and EXTRACT with "week with weekday"
+            // TODO[https://github.com/apache/beam/issues/20338]: Add tests for DATETIME_TRUNC and
+            // EXTRACT with "week with weekday"
             //  date parts once they are supported
             // + "EXTRACT(WEEK FROM DATETIME '2008-12-25 15:30:00') as week, "
             + "EXTRACT(DAY FROM DATETIME '2008-12-25 15:30:00') as day, "
@@ -1274,7 +1279,8 @@ public class ZetaSqlTimeFunctionsTest extends ZetaSqlTestBase {
     pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
   }
 
-  // TODO[BEAM-9166]: Add a test for CURRENT_TIMESTAMP function ("SELECT CURRENT_TIMESTAMP()")
+  // TODO[https://github.com/apache/beam/issues/19980]: Add a test for CURRENT_TIMESTAMP function
+  // ("SELECT CURRENT_TIMESTAMP()")
 
   @Test
   public void testExtractFromTimestamp() {
@@ -1287,7 +1293,8 @@ public class ZetaSqlTimeFunctionsTest extends ZetaSqlTestBase {
             + "  EXTRACT(ISOYEAR FROM timestamp) AS isoyear,\n"
             + "  EXTRACT(YEAR FROM timestamp) AS year,\n"
             + "  EXTRACT(ISOWEEK FROM timestamp) AS isoweek,\n"
-            // TODO[BEAM-10606]: Add tests for TIMESTAMP_TRUNC and EXTRACT with "week with weekday"
+            // TODO[https://github.com/apache/beam/issues/20338]: Add tests for TIMESTAMP_TRUNC and
+            // EXTRACT with "week with weekday"
             //  date parts once they are supported
             // + "  EXTRACT(WEEK FROM timestamp) AS week,\n"
             + "  EXTRACT(MONTH FROM timestamp) AS month,\n"
diff --git a/sdks/java/harness/jmh/src/main/java/org/apache/beam/fn/harness/jmh/ProcessBundleBenchmark.java b/sdks/java/harness/jmh/src/main/java/org/apache/beam/fn/harness/jmh/ProcessBundleBenchmark.java
index 6017e5f0277..654841d0d0d 100644
--- a/sdks/java/harness/jmh/src/main/java/org/apache/beam/fn/harness/jmh/ProcessBundleBenchmark.java
+++ b/sdks/java/harness/jmh/src/main/java/org/apache/beam/fn/harness/jmh/ProcessBundleBenchmark.java
@@ -315,7 +315,9 @@ public class ProcessBundleBenchmark {
     final StateRequestHandler cachingStateRequestHandler;
 
     @SuppressWarnings({
-      "unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+      // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of
+      // errorprone is released (2.11.0)
+      "unused"
     })
     private static class StatefulOutputZeroOneTwo
         extends DoFn<KV<String, String>, KV<String, String>> {
diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/AssignWindowsRunner.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/AssignWindowsRunner.java
index a6e147ed1ef..aa320099ffd 100644
--- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/AssignWindowsRunner.java
+++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/AssignWindowsRunner.java
@@ -89,7 +89,8 @@ class AssignWindowsRunner<T, W extends BoundedWindow> {
   }
 
   WindowedValue<T> assignWindows(WindowedValue<T> input) throws Exception {
-    // TODO: BEAM-4272 consider allocating only once and updating the current value per call.
+    // TODO: https://github.com/apache/beam/issues/18870 consider allocating only once and updating
+    // the current value per call.
     WindowFn<T, W>.AssignContext ctxt =
         windowFn.new AssignContext() {
           @Override
diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/FnHarness.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/FnHarness.java
index 8029b4da110..1bf20d5490c 100644
--- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/FnHarness.java
+++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/FnHarness.java
@@ -290,7 +290,8 @@ public class FnHarness {
                 processWideCache);
       }
 
-      // TODO(BEAM-9729): Remove once runners no longer send this instruction.
+      // TODO(https://github.com/apache/beam/issues/20270): Remove once runners no longer send this
+      // instruction.
       handlers.put(
           BeamFnApi.InstructionRequest.RequestCase.REGISTER,
           request ->
diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/data/PCollectionConsumerRegistry.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/data/PCollectionConsumerRegistry.java
index 9c70d4b6a09..bff3c0cc6f9 100644
--- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/data/PCollectionConsumerRegistry.java
+++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/data/PCollectionConsumerRegistry.java
@@ -244,7 +244,8 @@ public class PCollectionConsumerRegistry {
     public void accept(WindowedValue<T> input) throws Exception {
       // Increment the counter for each window the element occurs in.
       this.unboundedElementCountCounter.inc(input.getWindows().size());
-      // TODO(BEAM-11879): Consider updating size per window when we have window optimization.
+      // TODO(https://github.com/apache/beam/issues/20730): Consider updating size per window when
+      // we have window optimization.
       this.unboundedSampledByteSizeDistribution.tryUpdate(input.getValue(), this.coder);
       // Wrap the consumer with extra logic to set the metric container with the appropriate
       // PTransform context. This ensures that user metrics obtain the pTransform ID when they are
@@ -304,7 +305,8 @@ public class PCollectionConsumerRegistry {
       for (ConsumerAndMetadata consumerAndMetadata : consumerAndMetadatas) {
 
         if (consumerAndMetadata.getValueCoder() != null) {
-          // TODO(BEAM-11879): Consider updating size per window when we have window optimization.
+          // TODO(https://github.com/apache/beam/issues/20730): Consider updating size per window
+          // when we have window optimization.
           this.unboundedSampledByteSizeDistribution.tryUpdate(
               input.getValue(), consumerAndMetadata.getValueCoder());
         }
diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/FnApiDoFnRunnerTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/FnApiDoFnRunnerTest.java
index e67ab0fb1a8..b7cb5d32275 100644
--- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/FnApiDoFnRunnerTest.java
+++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/FnApiDoFnRunnerTest.java
@@ -161,7 +161,9 @@ import org.junit.runners.JUnit4;
 @RunWith(Enclosed.class)
 @SuppressWarnings({
   "rawtypes", // TODO(https://github.com/apache/beam/issues/20447)
-  "unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+  // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of
+  // errorprone is released (2.11.0)
+  "unused"
 })
 public class FnApiDoFnRunnerTest implements Serializable {
 
diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/control/ProcessBundleHandlerTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/control/ProcessBundleHandlerTest.java
index e63e320eee7..5c7062deafb 100644
--- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/control/ProcessBundleHandlerTest.java
+++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/control/ProcessBundleHandlerTest.java
@@ -148,7 +148,8 @@ import org.mockito.MockitoAnnotations;
 @RunWith(JUnit4.class)
 @SuppressWarnings({
   "rawtypes", // TODO(https://github.com/apache/beam/issues/20447)
-  "unused", // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+  "unused", // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of
+  // errorprone is released (2.11.0)
 })
 public class ProcessBundleHandlerTest {
   private static final String DATA_INPUT_URN = "beam:runner:source:v1";
diff --git a/sdks/java/io/amazon-web-services/src/main/java/org/apache/beam/sdk/io/aws/s3/S3FileSystem.java b/sdks/java/io/amazon-web-services/src/main/java/org/apache/beam/sdk/io/aws/s3/S3FileSystem.java
index de12d57efcd..6e9d60e0250 100644
--- a/sdks/java/io/amazon-web-services/src/main/java/org/apache/beam/sdk/io/aws/s3/S3FileSystem.java
+++ b/sdks/java/io/amazon-web-services/src/main/java/org/apache/beam/sdk/io/aws/s3/S3FileSystem.java
@@ -227,7 +227,8 @@ class S3FileSystem extends FileSystem<S3ResourceId> {
             exception = pathWithEncoding.getException();
             break;
           } else {
-            // TODO(BEAM-11821): Support file checksum in this method.
+            // TODO(https://github.com/apache/beam/issues/20755): Support file checksum in this
+            // method.
             metadatas.add(
                 createBeamMetadata(
                     pathWithEncoding.getPath(), pathWithEncoding.getContentEncoding(), null));
diff --git a/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/s3/S3FileSystem.java b/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/s3/S3FileSystem.java
index a03f0a2649d..41b3a409215 100644
--- a/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/s3/S3FileSystem.java
+++ b/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/s3/S3FileSystem.java
@@ -229,7 +229,8 @@ class S3FileSystem extends FileSystem<S3ResourceId> {
                 exception = pathWithEncoding.getException();
                 break;
               } else {
-                // TODO(BEAM-11821): Support file checksum in this method.
+                // TODO(https://github.com/apache/beam/issues/20755): Support file checksum in this
+                // method.
                 metadatas.add(
                     createBeamMetadata(
                         pathWithEncoding.getPath(), pathWithEncoding.getContentEncoding(), null));
diff --git a/sdks/java/io/clickhouse/src/main/java/org/apache/beam/sdk/io/clickhouse/ClickHouseIO.java b/sdks/java/io/clickhouse/src/main/java/org/apache/beam/sdk/io/clickhouse/ClickHouseIO.java
index ea641f1c293..75ad5b49d37 100644
--- a/sdks/java/io/clickhouse/src/main/java/org/apache/beam/sdk/io/clickhouse/ClickHouseIO.java
+++ b/sdks/java/io/clickhouse/src/main/java/org/apache/beam/sdk/io/clickhouse/ClickHouseIO.java
@@ -366,7 +366,7 @@ public class ClickHouseIO {
 
     // TODO: This should be the same as resolved so that Beam knows which fields
     // are being accessed. Currently Beam only supports wildcard descriptors.
-    // Once BEAM-4457 is fixed, fix this.
+    // Once https://github.com/apache/beam/issues/18903 is fixed, fix this.
     @FieldAccess("filterFields")
     final FieldAccessDescriptor fieldAccessDescriptor = FieldAccessDescriptor.withAllFields();
 
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BatchLoads.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BatchLoads.java
index e5124b0047a..7a1d5ff46fe 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BatchLoads.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BatchLoads.java
@@ -748,7 +748,8 @@ class BatchLoads<DestinationT, ElementT>
                 rowWriterFactory.getSourceFormat(),
                 useAvroLogicalTypes,
                 // Note that we can't pass through the schema update options when creating temporary
-                // tables. They also shouldn't be needed. See BEAM-12482 for additional details.
+                // tables. They also shouldn't be needed. See
+                // https://github.com/apache/beam/issues/21105 for additional details.
                 schemaUpdateOptions,
                 tempDataset))
         .setCoder(KvCoder.of(tableDestinationCoder, WriteTables.ResultCoder.INSTANCE));
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
index 66dfd066e0a..119b424996d 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
@@ -1266,7 +1266,7 @@ public class BigQueryIO {
       // table for the query to read the data and subsequently delete the table and dataset. Once
       // the storage API can handle anonymous tables, the storage source should be modified to use
       // anonymous tables and all of the code related to job ID generation and table and dataset
-      // cleanup can be removed. [BEAM-6931]
+      // cleanup can be removed. [https://github.com/apache/beam/issues/19375]
       //
 
       PCollectionView<String> jobIdTokenView;
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImpl.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImpl.java
index 50f3988c1c1..a0ac5d77cca 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImpl.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImpl.java
@@ -1155,9 +1155,9 @@ class BigQueryServicesImpl implements BigQueryServices {
               if (retryPolicy.shouldRetry(new InsertRetryPolicy.Context(error))) {
                 allErrors.add(error);
                 retryRows.add(rowsToPublish.get(errorIndex));
-                // TODO (BEAM-12139): Select the retry rows(using errorIndex) from the batch of rows
-                // which attempted insertion in this call. Not the entire set of rows in
-                // rowsToPublish.
+                // TODO (https://github.com/apache/beam/issues/20891): Select the retry rows (using
+                // errorIndex) from the batch of rows which attempted insertion in this call.
+                // Not the entire set of rows in rowsToPublish.
                 if (retryIds != null) {
                   retryIds.add(idsToPublish.get(errorIndex));
                 }
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryStorageArrowReader.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryStorageArrowReader.java
index 468dcbf5f2e..8f23825b986 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryStorageArrowReader.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryStorageArrowReader.java
@@ -69,7 +69,8 @@ class BigQueryStorageArrowReader implements BigQueryStorageReader {
       throw new IOException("Not Initialized");
     }
     Row row = recordBatchIterator.next();
-    // TODO(BEAM-12551): Update this interface to expect a Row, and avoid converting Arrow data to
+    // TODO(https://github.com/apache/beam/issues/21076): Update this interface to expect a Row, and
+    // avoid converting Arrow data to
     // GenericRecord.
     return AvroUtils.toGenericRecord(row, null);
   }
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryStorageReader.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryStorageReader.java
index b93d64aa0f6..e13a0bdd9d6 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryStorageReader.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryStorageReader.java
@@ -27,7 +27,8 @@ interface BigQueryStorageReader extends AutoCloseable {
 
   long getRowCount();
 
-  // TODO(BEAM-12551): BigQueryStorageReader should produce Rows, rather than GenericRecords
+  // TODO(https://github.com/apache/beam/issues/21076): BigQueryStorageReader should produce Rows,
+  // rather than GenericRecords
   GenericRecord readSingleRecord() throws IOException;
 
   boolean readyForNextReadResponse() throws IOException;
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiWritesShardedRecords.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiWritesShardedRecords.java
index ef128918fc1..d4a3a3ad202 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiWritesShardedRecords.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiWritesShardedRecords.java
@@ -91,7 +91,9 @@ import org.slf4j.LoggerFactory;
 /** A transform to write sharded records to BigQuery using the Storage API. */
 @SuppressWarnings({
   "FutureReturnValueIgnored",
-  "unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+  // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of
+  // errorprone is released (2.11.0)
+  "unused"
 })
 public class StorageApiWritesShardedRecords<DestinationT extends @NonNull Object, ElementT>
     extends PTransform<
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/firestore/FirestoreV1.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/firestore/FirestoreV1.java
index 74d6636b28a..3d9a2419567 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/firestore/FirestoreV1.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/firestore/FirestoreV1.java
@@ -789,8 +789,9 @@ public final class FirestoreV1 {
    * @see <a target="_blank" rel="noopener noreferrer"
    *     href="https://cloud.google.com/firestore/docs/reference/rpc/google.firestore.v1#google.firestore.v1.RunQueryResponse">google.firestore.v1.RunQueryResponse</a>
    */
-  // TODO(BEAM-12605): Add dynamic work rebalancing to support a Splittable DoFn
-  // TODO(BEAM-12606): Add support for progress reporting
+  // TODO(https://github.com/apache/beam/issues/21056): Add dynamic work rebalancing to support a
+  // Splittable DoFn
+  // TODO(https://github.com/apache/beam/issues/21050): Add support for progress reporting
   public static final class RunQuery
       extends Transform<
           PCollection<RunQueryRequest>, PCollection<RunQueryResponse>, RunQuery, RunQuery.Builder> {
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/DicomIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/DicomIO.java
index 8bb17f10ca2..3756d69a704 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/DicomIO.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/DicomIO.java
@@ -174,7 +174,7 @@ public class DicomIO {
       public void processElement(ProcessContext context) {
         String dicomWebPath = context.element();
         try {
-          // TODO [BEAM-11259] Change to non-blocking async calls
+          // TODO [https://github.com/apache/beam/issues/20582] Change to non-blocking async calls
           String responseData = dicomStore.retrieveDicomStudyMetadata(dicomWebPath);
           context.output(METADATA, responseData);
         } catch (IOException e) {
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubMessages.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubMessages.java
index bf6a28863b0..1f0025a003a 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubMessages.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubMessages.java
@@ -32,7 +32,7 @@ public final class PubsubMessages {
     com.google.pubsub.v1.PubsubMessage.Builder message =
         com.google.pubsub.v1.PubsubMessage.newBuilder()
             .setData(ByteString.copyFrom(input.getPayload()));
-    // TODO(BEAM-8085) this should not be null
+    // TODO(https://github.com/apache/beam/issues/19787) this should not be null
     if (attributes != null) {
       message.putAllAttributes(attributes);
     }
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/TestPubsubSignal.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/TestPubsubSignal.java
index 60127b39a61..f4813ec8314 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/TestPubsubSignal.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/TestPubsubSignal.java
@@ -75,7 +75,9 @@ import org.slf4j.LoggerFactory;
  */
 @SuppressWarnings({
   "nullness", // TODO(https://github.com/apache/beam/issues/20497)
-  "unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+  // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of
+  // errorprone is released (2.11.0)
+  "unused"
 })
 public class TestPubsubSignal implements TestRule {
   private static final Logger LOG = LoggerFactory.getLogger(TestPubsubSignal.class);
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIO.java
index ce496557455..b4dd9f7af9c 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIO.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIO.java
@@ -1880,8 +1880,8 @@ public class SpannerIO {
       }
     }
 
-    // TODO(BEAM-1287): Remove this when FinishBundle has added support for an {@link
-    // OutputReceiver}
+    // TODO(https://github.com/apache/beam/issues/18203): Remove this when FinishBundle has added
+    // support for an {@link OutputReceiver}
     private static class OutputReceiverForFinishBundle
         implements OutputReceiver<Iterable<MutationGroup>> {
 
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/SpannerTransformRegistrar.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/SpannerTransformRegistrar.java
index 3574de8d3a9..5301642454d 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/SpannerTransformRegistrar.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/SpannerTransformRegistrar.java
@@ -117,7 +117,8 @@ public class SpannerTransformRegistrar implements ExternalTransformRegistrar {
       implements ExternalTransformBuilder<ReadBuilder.Configuration, PBegin, PCollection<Row>> {
 
     public static class Configuration extends CrossLanguageConfiguration {
-      // TODO: BEAM-10851 Come up with something to determine schema without this explicit parameter
+      // TODO: https://github.com/apache/beam/issues/20415 Come up with something to determine
+      // schema without this explicit parameter
       private Schema schema = Schema.builder().build();
       private @Nullable String sql;
       private @Nullable String table;
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/healthcare/DicomIOReadIT.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/healthcare/DicomIOReadIT.java
index 4986f094e53..1469eb0dd11 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/healthcare/DicomIOReadIT.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/healthcare/DicomIOReadIT.java
@@ -61,7 +61,7 @@ public class DicomIOReadIT {
     client.deleteDicomStore(healthcareDataset + "/dicomStores/" + storeName);
   }
 
-  @Ignore("https://jira.apache.org/jira/browse/BEAM-11376")
+  @Ignore("https://github.com/apache/beam/issues/20644")
   @Test
   public void testDicomMetadataRead() throws IOException {
     String webPath =
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/pubsublite/ReadWriteIT.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/pubsublite/ReadWriteIT.java
index 729c51cf937..d31f06be89e 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/pubsublite/ReadWriteIT.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/pubsublite/ReadWriteIT.java
@@ -151,8 +151,8 @@ public class ReadWriteIT {
     }
   }
 
-  // Workaround for BEAM-12867
-  // TODO(BEAM-12867): Remove this.
+  // Workaround for https://github.com/apache/beam/issues/21257
+  // TODO(https://github.com/apache/beam/issues/21257): Remove this.
   private static class CustomCreate extends PTransform<PCollection<Void>, PCollection<Integer>> {
     @Override
     public PCollection<Integer> expand(PCollection<Void> input) {
@@ -196,7 +196,7 @@ public class ReadWriteIT {
             PubsubLiteIO.read(
                 SubscriberOptions.newBuilder().setSubscriptionPath(subscriptionPath).build()));
     return messages;
-    // TODO(BEAM-13230): Fix and re-enable
+    // TODO(https://github.com/apache/beam/issues/21157): Fix and re-enable
     // Deduplicate messages based on the uuids added in PubsubLiteIO.addUuids() when writing.
     // return messages.apply(
     //   "dedupeMessages", PubsubLiteIO.deduplicate(UuidDeduplicationOptions.newBuilder().build()));
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/SpannerChangeStreamErrorTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/SpannerChangeStreamErrorTest.java
index b995b23a379..0f72f95e3ea 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/SpannerChangeStreamErrorTest.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/SpannerChangeStreamErrorTest.java
@@ -135,7 +135,7 @@ public class SpannerChangeStreamErrorTest implements Serializable {
   }
 
   @Test
-  @Ignore("BEAM-14152")
+  @Ignore("https://github.com/apache/beam/issues/21533")
   public void testUnavailableExceptionRetries() throws InterruptedException {
     DirectOptions options = PipelineOptionsFactory.as(DirectOptions.class);
     options.setBlockOnRun(false);
diff --git a/sdks/java/io/hcatalog/build.gradle b/sdks/java/io/hcatalog/build.gradle
index b26721a65bb..ba8b4bf6441 100644
--- a/sdks/java/io/hcatalog/build.gradle
+++ b/sdks/java/io/hcatalog/build.gradle
@@ -104,4 +104,4 @@ hadoopVersions.each { kv ->
     classpath = configurations."hadoopVersion$kv.key" + sourceSets.test.runtimeClasspath
     include '**/*Test.class'
   }
-}
+}
\ No newline at end of file
diff --git a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaExactlyOnceSink.java b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaExactlyOnceSink.java
index 89acc645b73..ee695d4b63b 100644
--- a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaExactlyOnceSink.java
+++ b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaExactlyOnceSink.java
@@ -91,7 +91,9 @@ import org.slf4j.LoggerFactory;
 @SuppressWarnings({
   "rawtypes", // TODO(https://github.com/apache/beam/issues/20447)
   "nullness", // TODO(https://github.com/apache/beam/issues/20497)
-  "unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+  // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of
+  // errorprone is released (2.11.0)
+  "unused"
 })
 class KafkaExactlyOnceSink<K, V>
     extends PTransform<PCollection<ProducerRecord<K, V>>, PCollection<Void>> {
diff --git a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java
index 8fb2a1c16ec..325d5e94bcf 100644
--- a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java
+++ b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java
@@ -1331,7 +1331,8 @@ public class KafkaIO {
     private boolean runnerPrefersLegacyRead(PipelineOptions options) {
      // Only the Dataflow runner requires SDF read at this moment. For other non-portable runners,
      // if use_sdf_read isn't specified, legacy read is used due to performance concerns.
-      // TODO(BEAM-10670): Remove this special check when we address performance issue.
+      // TODO(https://github.com/apache/beam/issues/20530): Remove this special check when we
+      // address performance issue.
       if (ExperimentalOptions.hasExperiment(options, "use_sdf_read")) {
         return false;
       }
@@ -1667,8 +1668,9 @@ public class KafkaIO {
   @SuppressFBWarnings("URF_UNREAD_FIELD")
   /**
   * Represents a Kafka record with metadata where key and values are byte arrays. This class should
-   * only be used to represent a Kafka record for external transforms. TODO(BEAM-7345): use regular
-   * KafkaRecord class when Beam Schema inference supports generics.
+   * only be used to represent a Kafka record for external transforms.
+   * TODO(https://github.com/apache/beam/issues/18919): use regular KafkaRecord class when Beam
+   * Schema inference supports generics.
    */
   static class ByteArrayKafkaRecord {
 
diff --git a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIOReadImplementationCompatibility.java b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIOReadImplementationCompatibility.java
index 2072925b964..9599398c0bd 100644
--- a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIOReadImplementationCompatibility.java
+++ b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIOReadImplementationCompatibility.java
@@ -42,7 +42,7 @@ import org.checkerframework.checker.initialization.qual.UnderInitialization;
  * It is required because the methods used to configure the expected behaviour are shared,<br>
  * and not every configuration is supported by every implementation.
  */
-// TODO(BEAM-14316) 2022-04-15: Remove compatibility testing related code from KafkaIO.Read after
+// TODO(https://github.com/apache/beam/issues/21482) 2022-04-15: Remove compatibility testing related code from KafkaIO.Read after
 // SDF implementation has fully replaced the legacy read
 class KafkaIOReadImplementationCompatibility {
 
diff --git a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/ReadFromKafkaDoFn.java b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/ReadFromKafkaDoFn.java
index e1d2897681a..eaf8125242f 100644
--- a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/ReadFromKafkaDoFn.java
+++ b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/ReadFromKafkaDoFn.java
@@ -88,7 +88,7 @@ import org.slf4j.LoggerFactory;
  *
  * <h4>Splitting</h4>
  *
- * <p>TODO(BEAM-10319): Add support for initial splitting.
+ * <p>TODO(https://github.com/apache/beam/issues/20280): Add support for initial splitting.
  *
  * <h4>Checkpoint and Resume Processing</h4>
  *
diff --git a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/WatchKafkaTopicPartitionDoFn.java b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/WatchKafkaTopicPartitionDoFn.java
index afe6c66159d..09337c0ba90 100644
--- a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/WatchKafkaTopicPartitionDoFn.java
+++ b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/WatchKafkaTopicPartitionDoFn.java
@@ -51,7 +51,9 @@ import org.joda.time.Instant;
  */
 @SuppressWarnings({
   "nullness",
-  "unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+  // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of
+  // errorprone is released (2.11.0)
+  "unused"
 })
 @Experimental
 class WatchKafkaTopicPartitionDoFn extends DoFn<KV<byte[], byte[]>, KafkaSourceDescriptor> {
diff --git a/sdks/java/io/solr/src/test/java/org/apache/beam/sdk/io/solr/SolrIOTest.java b/sdks/java/io/solr/src/test/java/org/apache/beam/sdk/io/solr/SolrIOTest.java
index f08ff5bdaf4..399e5f8a3ee 100644
--- a/sdks/java/io/solr/src/test/java/org/apache/beam/sdk/io/solr/SolrIOTest.java
+++ b/sdks/java/io/solr/src/test/java/org/apache/beam/sdk/io/solr/SolrIOTest.java
@@ -275,7 +275,7 @@ public class SolrIOTest extends SolrCloudTestCase {
     try {
       pipeline.run();
     } catch (final Pipeline.PipelineExecutionException e) {
-      // Hack: await all worker threads completing (BEAM-4040)
+      // Hack: wait for all worker threads to complete (https://github.com/apache/beam/issues/18893)
       int waitAttempts = 30; // defensive coding
       while (namedThreadIsAlive("direct-runner-worker") && waitAttempts-- >= 0) {
         LOG.info("Pausing to allow direct-runner-worker threads to finish");
diff --git a/sdks/java/io/splunk/src/main/java/org/apache/beam/sdk/io/splunk/SplunkEventWriter.java b/sdks/java/io/splunk/src/main/java/org/apache/beam/sdk/io/splunk/SplunkEventWriter.java
index 97708013c53..f790d8b42d9 100644
--- a/sdks/java/io/splunk/src/main/java/org/apache/beam/sdk/io/splunk/SplunkEventWriter.java
+++ b/sdks/java/io/splunk/src/main/java/org/apache/beam/sdk/io/splunk/SplunkEventWriter.java
@@ -56,7 +56,9 @@ import org.slf4j.LoggerFactory;
 @AutoValue
 @SuppressWarnings({
   "nullness", // TODO(https://github.com/apache/beam/issues/20497)
-  "unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+  // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of
+  // errorprone is released (2.11.0)
+  "unused"
 })
 abstract class SplunkEventWriter extends DoFn<KV<Integer, SplunkEvent>, SplunkWriteError> {
 
diff --git a/sdks/java/testing/nexmark/build.gradle b/sdks/java/testing/nexmark/build.gradle
index 3537397b673..3a8d3440c80 100644
--- a/sdks/java/testing/nexmark/build.gradle
+++ b/sdks/java/testing/nexmark/build.gradle
@@ -135,7 +135,7 @@ def getNexmarkArgs = {
       def sdkContainerImage = project.findProperty('sdkContainerImage') ?: project(":runners:google-cloud-dataflow-java").dockerJavaImageName
       nexmarkArgsList.add("--sdkContainerImage=${sdkContainerImage}")
 
-      // TODO(BEAM-12295) enable all queries once issues with runner V2 is fixed.
+      // TODO(https://github.com/apache/beam/issues/20880) enable all queries once issues with runner V2 are fixed.
       if (nexmarkArgsList.contains("--streaming=true")) {
         nexmarkArgsList.add("--skipQueries=AVERAGE_PRICE_FOR_CATEGORY,AVERAGE_SELLING_PRICE_BY_SELLER,WINNING_BIDS,BOUNDED_SIDE_INPUT_JOIN,SESSION_SIDE_INPUT_JOIN,PORTABILITY_BATCH") // 4, 6, 9, 13, 14, 15
       } else {
diff --git a/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/NexmarkUtils.java b/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/NexmarkUtils.java
index 3e7cd502a1e..568ac37f2ba 100644
--- a/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/NexmarkUtils.java
+++ b/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/NexmarkUtils.java
@@ -92,7 +92,9 @@ import org.slf4j.LoggerFactory;
 /** Odds 'n Ends used throughout queries and driver. */
 @SuppressWarnings({
   "nullness", // TODO(https://github.com/apache/beam/issues/20497)
-  "unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+  // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of
+  // errorprone is released (2.11.0)
+  "unused"
 })
 public class NexmarkUtils {
   private static final Logger LOG = LoggerFactory.getLogger(NexmarkUtils.class);
diff --git a/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/queries/Query10.java b/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/queries/Query10.java
index cd268968d4e..ae9ac559a79 100644
--- a/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/queries/Query10.java
+++ b/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/queries/Query10.java
@@ -124,7 +124,8 @@ public class Query10 extends NexmarkQueryTransform<Done> {
   private WritableByteChannel openWritableGcsFile(GcsOptions options, String filename)
       throws IOException {
     // TODO
-    // [BEAM-10879] Fix after PR: right now this is a specific Google added use case
+    // [https://github.com/apache/beam/issues/20670] Fix after PR: right now this is a specific
+    // use case added by Google.
     // Discuss it on the ML: should we keep GCS, use HDFS, or use a generic Beam FileSystem approach?
     throw new UnsupportedOperationException("Disabled after removal of GcsIOChannelFactory");
   }
diff --git a/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/queries/Query3.java b/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/queries/Query3.java
index b9473a4770f..92d58011184 100644
--- a/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/queries/Query3.java
+++ b/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/queries/Query3.java
@@ -63,7 +63,9 @@ import org.slf4j.LoggerFactory;
  */
 @SuppressWarnings({
   "nullness", // TODO(https://github.com/apache/beam/issues/20497)
-  "unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+  // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of
+  // errorprone is released (2.11.0)
+  "unused"
 })
 public class Query3 extends NexmarkQueryTransform<NameCityStateId> {
 
diff --git a/sdks/java/testing/nexmark/src/test/java/org/apache/beam/sdk/nexmark/queries/SqlQueryTest.java b/sdks/java/testing/nexmark/src/test/java/org/apache/beam/sdk/nexmark/queries/SqlQueryTest.java
index 5f054f7377c..6fa50928d19 100644
--- a/sdks/java/testing/nexmark/src/test/java/org/apache/beam/sdk/nexmark/queries/SqlQueryTest.java
+++ b/sdks/java/testing/nexmark/src/test/java/org/apache/beam/sdk/nexmark/queries/SqlQueryTest.java
@@ -122,13 +122,13 @@ public class SqlQueryTest {
     }
 
     @Test
-    @Ignore("https://jira.apache.org/jira/browse/BEAM-7072")
+    @Ignore("https://github.com/apache/beam/issues/19541")
     public void sqlQuery5MatchesModelBatch() {
       queryMatchesModel("SqlQuery5TestBatch", getQuery5(CONFIG), new Query5Model(CONFIG), false);
     }
 
     @Test
-    @Ignore("https://jira.apache.org/jira/browse/BEAM-7072")
+    @Ignore("https://github.com/apache/beam/issues/19541")
     public void sqlQuery5MatchesModelStreaming() {
       queryMatchesModel("SqlQuery5TestStreaming", getQuery5(CONFIG), new Query5Model(CONFIG), true);
     }
diff --git a/sdks/java/testing/nexmark/src/test/java/org/apache/beam/sdk/nexmark/queries/sql/SqlQuery5Test.java b/sdks/java/testing/nexmark/src/test/java/org/apache/beam/sdk/nexmark/queries/sql/SqlQuery5Test.java
index 31d48a94040..1c5d230186e 100644
--- a/sdks/java/testing/nexmark/src/test/java/org/apache/beam/sdk/nexmark/queries/sql/SqlQuery5Test.java
+++ b/sdks/java/testing/nexmark/src/test/java/org/apache/beam/sdk/nexmark/queries/sql/SqlQuery5Test.java
@@ -61,7 +61,7 @@ public class SqlQuery5Test {
   @Rule public TestPipeline testPipeline = TestPipeline.create();
 
   @Test
-  @Ignore("https://jira.apache.org/jira/browse/BEAM-7072")
+  @Ignore("https://github.com/apache/beam/issues/19541")
   public void testBids() throws Exception {
     assertEquals(Long.valueOf(config.windowSizeSec), Long.valueOf(config.windowPeriodSec * 2));
 
diff --git a/sdks/java/testing/nexmark/src/test/java/org/apache/beam/sdk/nexmark/sources/UnboundedEventSourceTest.java b/sdks/java/testing/nexmark/src/test/java/org/apache/beam/sdk/nexmark/sources/UnboundedEventSourceTest.java
index 1980ee88830..4ed0e3d8aca 100644
--- a/sdks/java/testing/nexmark/src/test/java/org/apache/beam/sdk/nexmark/sources/UnboundedEventSourceTest.java
+++ b/sdks/java/testing/nexmark/src/test/java/org/apache/beam/sdk/nexmark/sources/UnboundedEventSourceTest.java
@@ -79,7 +79,7 @@ public class UnboundedEventSourceTest {
   * Check that aggressively checkpointing and resuming a reader gives us exactly the same event
   * stream as reading directly.
    */
-  @Ignore("TODO(BEAM-5070) Test is flaky. Fix before reenabling.")
+  @Ignore("TODO(https://github.com/apache/beam/issues/19077) Test is flaky. Fix before reenabling.")
   @Test
   public void resumeFromCheckpoint() throws IOException {
     Random random = new Random(297);
diff --git a/sdks/java/testing/tpcds/build.gradle b/sdks/java/testing/tpcds/build.gradle
index ed46f6619b8..34e9a54c9c9 100644
--- a/sdks/java/testing/tpcds/build.gradle
+++ b/sdks/java/testing/tpcds/build.gradle
@@ -56,7 +56,7 @@ configurations {
 dependencies {
     implementation enforcedPlatform(library.java.google_cloud_platform_libraries_bom)
 
-    // TODO(BEAM-13179): Determine how to build without this dependency
+    // TODO(https://github.com/apache/beam/issues/21156): Determine how to build without this dependency
     provided "org.immutables:value:2.8.8"
     permitUnusedDeclared "org.immutables:value:2.8.8"
     implementation library.java.avro