You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@beam.apache.org by pa...@apache.org on 2022/09/29 20:08:06 UTC
[beam] branch master updated: [Playground] [Backend] Removing the code related to the Cloud Storage (#22872)
This is an automated email from the ASF dual-hosted git repository.
pabloem pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/beam.git
The following commit(s) were added to refs/heads/master by this push:
new 5669ef43822 [Playground] [Backend] Removing the code related to the Cloud Storage (#22872)
5669ef43822 is described below
commit 5669ef43822ea4008c93d93c6714354dfb415c01
Author: Vladislav Chunikhin <10...@users.noreply.github.com>
AuthorDate: Fri Sep 30 00:07:56 2022 +0400
[Playground] [Backend] Removing the code related to the Cloud Storage (#22872)
* [Playground] [Backend] added the datastore query to get an example catalog
* [Playground] [Backend] added datastore queries to get precompiled objects
* [Playground] [Backend] added cache component for examples
* [Playground] [Backend] updated example endpoints
* [Playground] [Backend] added sdk as a part of example response
* [Playground] [Backend] removed the storage client from playground backend
* [Playground] [Backend] refactoring for unit tests
* [Playground] [Backend] refactoring for unit tests
* [Playground] [Backend] refactoring for unit tests
* [Playground] [Backend] refactoring for db utils
* [Playground] [Backend] updated CONTRIBUTE.md file
* [Playground] [Backend] refactoring for datastore db
* [Playground] [Backend] refactoring for datastore db
* [Playground] [Backend] refactoring for datastore db
* [Playground] [Backend] refactoring for datastore db
* [Playground] [Backend] minor refactoring after review
* [Playground] [Backend] minor refactoring after review
* [Playground] [Backend] minor refactoring after review
* [Playground] [Backend] minor refactoring after review
* [Playground] [Backend] updated complexity field
* [Playground] [Backend] updated complexity field
* [Playground] [Backend] updated complexity field
* squash! [Playground] [Backend] updated complexity field
* [Playground] [Backend] updated complexity field
* updated complexity field
* [Playground] [Backend] updated complexity field
* updated complexity field
* [Playground] [Backend] updated complexity field
* updated complexity field
* [Playground] [Backend] fixed integration tests for datastore
* [Playground] [Backend] fixed integration tests for datastore
* [Playground] [Backend] added testing data cleaner
* [Playground] [Backend] added testing data cleaner
* [Playground] [Backend] refactoring for integration tests
* [Playground] [Backend] fixed integration tests for datastore
* [Playground] [Backend] added testing data cleaner
* [Playground] [Backend] refactoring for integration tests
* [Playground] [Backend] added one more gradle task to run tests without cache
* fixed integration tests
* [Playground] [Backend] added testing data cleaner
* [Playground] [Backend] refactoring for integration tests
* [Playground] [Backend] added one more gradle task to run tests without cache
* [Playground] [Backend] edited test execution
* fixed integration tests
* [Playground] [Backend] added testing data cleaner
* [Playground] [Backend] refactoring for integration tests
* [Playground] [Backend] added one more gradle task to run tests without cache
* [Playground] [Backend] edited test execution
* fixed integration tests
* [Playground] [Backend] added testing data cleaner
* [Playground] [Backend] refactoring for integration tests
* [Playground] [Backend] added one more gradle task to run tests without cache
* [Playground] [Backend] edited test execution
* [Playground] [Backend] updated playground README
* [Playground] [Backend] edited getting datastore key
* [Playground] [Backend] edited getting datastore key
* [Playground] [Backend] updated unit tests for different namespaces
* [Playground] [Backend] edited getting datastore key
* [Playground] [Backend] updated unit tests for different namespaces
* [Playground] [Backend] edited cache component tests
* [Playground] [Backend] edited getting datastore key
* [Playground] [Backend] updated unit tests for different namespaces
* [Playground] [Backend] edited cache component tests
* [Playground] [Backend] updated integration tests
* [Playground] [Backend] edited getting datastore key
* [Playground] [Backend] updated unit tests for different namespaces
* [Playground] [Backend] edited cache component tests
* [Playground] [Backend] updated integration tests
* [Playground] [Backend] edited getting datastore key
* [Playground] [Backend] updated unit tests for different namespaces
* [Playground] [Backend] edited cache component tests
* [Playground] [Backend] updated integration tests
* [Playground] [Infrastructure] added the datastore client
* [Playground] [Infrastructure] added the type property to an example entity
* [Playground] [Infrastructure] removed the cloud storage client
* [Playground] [Infrastructure] changed validation examples for duplicates by name
* [Playground] [Infrastructure] fixed duplicated example names
* [Playground] [Infrastructure] added the google cloud project id as an env variable
* [Playground] [Backend] updated docker files for runners
* [Playground] [Backend] added SDK validation to save a code snippet
* [Playground] [Backend] removed comments
* [Playground] [Backend] resolved comments after review
* [Playground] [Infrastructure] updated dependency versions
* [Playground] [Backend] fixed the java runner for examples with graphs
* [Playground] [Infrastructure] added loading catalogs to the cloud datastore
* [Playground] [Backend] remove extra imports after merging
* [Playground] [Backend] fixes after merging with other branches
* [Playground] [Backend] added generics for one method
* [Playground] [Backend] fixes after merging
* [Playground] [Backend] fixes after merging
* fixing dependencies after merge
fixing dependencies after merge with failed tests
Co-authored-by: oborysevych <ol...@akvelon.com>
---
playground/README.md | 7 +
playground/api/v1/api.proto | 4 +-
playground/backend/CONTRIBUTE.md | 12 +-
playground/backend/build.gradle.kts | 1 -
playground/backend/containers/go/Dockerfile | 1 -
playground/backend/containers/java/Dockerfile | 1 -
playground/backend/containers/python/Dockerfile | 1 -
playground/backend/containers/router/Dockerfile | 1 -
playground/backend/containers/scio/Dockerfile | 1 -
playground/backend/go.mod | 5 +-
playground/backend/go.sum | 9 +-
playground/backend/internal/api/v1/api_grpc.pb.go | 8 +-
.../internal/cloud_bucket/precompiled_objects.go | 433 ---------------------
.../cloud_bucket/precompiled_objects_test.go | 264 -------------
playground/backend/internal/db/entity/snippet.go | 24 +-
.../internal/db/mapper/datastore_mapper_test.go | 2 +-
.../db/schema/migration/migration_v001_test.go | 2 +-
.../backend/internal/environment/application.go | 11 +-
.../internal/environment/environment_service.go | 10 +-
.../environment/environment_service_test.go | 17 +-
.../backend/internal/utils/datastore_utils.go | 21 -
.../backend/internal/utils/datastore_utils_test.go | 36 --
.../internal/utils/precompiled_objects_utils.go | 87 -----
.../utils/precompiled_objects_utils_test.go | 125 +-----
playground/infrastructure/proxy/allow_list.py | 1 -
25 files changed, 56 insertions(+), 1028 deletions(-)
diff --git a/playground/README.md b/playground/README.md
index 11e17ff0322..61099924829 100644
--- a/playground/README.md
+++ b/playground/README.md
@@ -81,6 +81,13 @@ cd beam
./gradlew playground:backend:removeUnusedSnippet -DdayDiff={int} -DprojectId={string}
```
+## Run playground tests without cache
+
+```
+cd beam
+ ./gradlew playground:backend:testWithoutCache
+```
+
# Deployment
See [terraform](./terraform/README.md) for details on how to build and deploy
diff --git a/playground/api/v1/api.proto b/playground/api/v1/api.proto
index 68d2c9e7651..4a458d3750c 100644
--- a/playground/api/v1/api.proto
+++ b/playground/api/v1/api.proto
@@ -315,10 +315,10 @@ service PlaygroundService {
// Cancel code processing
rpc Cancel(CancelRequest) returns (CancelResponse);
- // Get all precompiled objects from the cloud storage.
+ // Get all precompiled objects from the cloud datastore.
rpc GetPrecompiledObjects(GetPrecompiledObjectsRequest) returns (GetPrecompiledObjectsResponse);
- // Get precompiled object from the cloud storage.
+ // Get precompiled object from the cloud datastore.
rpc GetPrecompiledObject(GetPrecompiledObjectRequest) returns (GetPrecompiledObjectResponse);
// Get the code of an PrecompiledObject.
diff --git a/playground/backend/CONTRIBUTE.md b/playground/backend/CONTRIBUTE.md
index 32ba1c57673..ee26798ede6 100644
--- a/playground/backend/CONTRIBUTE.md
+++ b/playground/backend/CONTRIBUTE.md
@@ -36,8 +36,10 @@ backend/
├── internal # backend business logic
│ ├── api # generated grpc API files
│ ├── cache # logic of work with cache
-│ ├── cloud_bucket # logic of work with cloud buckets and precompiled objects
│ ├── code_processing # logic of processing the received code
+│ ├── components # logic of work for more difficult processes using several packages
+│ ├── constants # application constants to use them anywhere in the application
+│ ├── db # logic of work with database, e.g. the Cloud Datastore
│ ├── environment # backend environments e.g. SDK of the instance
│ ├── errors # custom errors to send them to the client
│ ├── executors # logic of work with code executors
@@ -46,10 +48,14 @@ backend/
│ ├── preparers # logic of preparing code before execution
│ ├── setup_tools # logic of set up of executors and file systems by SDK requirements
│ ├── streaming # logic of saving execution output as a stream
+│ ├── tests # logic of work with unit/integration tests in the application, e.g. testing scripts to download mock data to database
│ ├── utils # different useful tools
│ └── validators # logic of validation code before execution
-├── go.mod # define backend go module and contain all project's dependencies
-├── logging.properties # config file to set up log for Java code
+├── go.mod # define backend go module and contain all project's dependencies
+├── logging.properties # config file to set up log for Java code
+├── properties.yaml # property file consists of application properties required for the operation logic
+├── start_datastore_emulator.sh # shell script to run the datastore emulator for local deployment or testing
+├── stop_datastore_emulator.sh # shell script to stop the datastore emulator
...
```
diff --git a/playground/backend/build.gradle.kts b/playground/backend/build.gradle.kts
index 8a66cad854e..124622b3a9e 100644
--- a/playground/backend/build.gradle.kts
+++ b/playground/backend/build.gradle.kts
@@ -131,7 +131,6 @@ task("benchmarkCodeProcessing") {
}
task("benchmark") {
- dependsOn(":playground:backend:benchmarkPrecompiledObjects")
dependsOn(":playground:backend:benchmarkCodeProcessing")
}
diff --git a/playground/backend/containers/go/Dockerfile b/playground/backend/containers/go/Dockerfile
index db162e9bfe8..857d9157044 100644
--- a/playground/backend/containers/go/Dockerfile
+++ b/playground/backend/containers/go/Dockerfile
@@ -55,7 +55,6 @@ ENV SERVER_IP=0.0.0.0
ENV SERVER_PORT=8080
ENV APP_WORK_DIR=/opt/playground/backend/
ENV BEAM_SDK="SDK_GO"
-ENV BUCKET_NAME="playground-precompiled-objects"
ENV PROPERTY_PATH=/opt/playground/backend/properties.yaml
## Copy build result
COPY src/configs /opt/playground/backend/configs/
diff --git a/playground/backend/containers/java/Dockerfile b/playground/backend/containers/java/Dockerfile
index ccc6f096920..6f2e564ff06 100644
--- a/playground/backend/containers/java/Dockerfile
+++ b/playground/backend/containers/java/Dockerfile
@@ -46,7 +46,6 @@ ENV SERVER_IP=0.0.0.0
ENV SERVER_PORT=8080
ENV APP_WORK_DIR=/opt/playground/backend/
ENV BEAM_SDK="SDK_JAVA"
-ENV BUCKET_NAME="playground-precompiled-objects"
ENV PROPERTY_PATH=/opt/playground/backend/properties.yaml
# Copy build result
diff --git a/playground/backend/containers/python/Dockerfile b/playground/backend/containers/python/Dockerfile
index ca7669f96b6..2a9182a129a 100644
--- a/playground/backend/containers/python/Dockerfile
+++ b/playground/backend/containers/python/Dockerfile
@@ -42,7 +42,6 @@ ENV SERVER_IP=0.0.0.0
ENV SERVER_PORT=8080
ENV APP_WORK_DIR=/opt/playground/backend/
ENV BEAM_SDK="SDK_PYTHON"
-ENV BUCKET_NAME="playground-precompiled-objects"
ENV PROPERTY_PATH=/opt/playground/backend/properties.yaml
# Copy build result
diff --git a/playground/backend/containers/router/Dockerfile b/playground/backend/containers/router/Dockerfile
index a15edd56d25..e56443412b5 100644
--- a/playground/backend/containers/router/Dockerfile
+++ b/playground/backend/containers/router/Dockerfile
@@ -68,7 +68,6 @@ ENV SERVER_IP=0.0.0.0
ENV SERVER_PORT=8080
ENV APP_WORK_DIR=/opt/playground/backend/
ENV BEAM_SDK="SDK_UNSPECIFIED"
-ENV BUCKET_NAME="playground-precompiled-objects"
ENV SDK_CONFIG=/opt/playground/backend/sdks.yaml
ENV PROPERTY_PATH=/opt/playground/backend/
diff --git a/playground/backend/containers/scio/Dockerfile b/playground/backend/containers/scio/Dockerfile
index e84f152e2a3..6df3c0ad4b8 100644
--- a/playground/backend/containers/scio/Dockerfile
+++ b/playground/backend/containers/scio/Dockerfile
@@ -40,7 +40,6 @@ ENV SERVER_IP=0.0.0.0
ENV SERVER_PORT=8080
ENV APP_WORK_DIR=/opt/playground/backend/
ENV BEAM_SDK="SDK_SCIO"
-ENV BUCKET_NAME="playground-precompiled-objects"
ENV PROPERTY_PATH=/opt/playground/backend/properties.yaml
# Copy build result
diff --git a/playground/backend/go.mod b/playground/backend/go.mod
index aa22bf22f5c..87da2468e62 100644
--- a/playground/backend/go.mod
+++ b/playground/backend/go.mod
@@ -20,7 +20,6 @@ go 1.18
require (
cloud.google.com/go/datastore v1.6.0
cloud.google.com/go/logging v1.4.2
- cloud.google.com/go/storage v1.23.0
github.com/go-redis/redis/v8 v8.11.4
github.com/go-redis/redismock/v8 v8.0.6
github.com/google/uuid v1.3.0
@@ -29,7 +28,6 @@ require (
github.com/rs/cors v1.8.0
github.com/spf13/viper v1.12.0
go.uber.org/goleak v1.1.12
- google.golang.org/api v0.85.0
google.golang.org/grpc v1.47.0
google.golang.org/protobuf v1.28.0
gopkg.in/yaml.v3 v3.0.1
@@ -38,7 +36,6 @@ require (
require (
cloud.google.com/go v0.102.1 // indirect
cloud.google.com/go/compute v1.7.0 // indirect
- cloud.google.com/go/iam v0.3.0 // indirect
github.com/cespare/xxhash/v2 v2.1.2 // indirect
github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f // indirect
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
@@ -48,7 +45,6 @@ require (
github.com/google/go-cmp v0.5.8 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.1.0 // indirect
github.com/googleapis/gax-go/v2 v2.4.0 // indirect
- github.com/googleapis/go-type-adapters v1.0.0 // indirect
github.com/hashicorp/hcl v1.0.0 // indirect
github.com/klauspost/compress v1.13.1 // indirect
github.com/magiconair/properties v1.8.6 // indirect
@@ -67,6 +63,7 @@ require (
golang.org/x/sys v0.0.0-20220615213510-4f61da869c0c // indirect
golang.org/x/text v0.3.7 // indirect
golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f // indirect
+ google.golang.org/api v0.84.0 // indirect
google.golang.org/appengine v1.6.7 // indirect
google.golang.org/genproto v0.0.0-20220617124728-180714bec0ad // indirect
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect
diff --git a/playground/backend/go.sum b/playground/backend/go.sum
index 9daeae75b17..523fbdd940d 100644
--- a/playground/backend/go.sum
+++ b/playground/backend/go.sum
@@ -49,7 +49,6 @@ cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7
cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk=
cloud.google.com/go/datastore v1.6.0 h1:wZaHIqu1tebvGRYhVgcfNX6jN2q638OGO23JyJckxuI=
cloud.google.com/go/datastore v1.6.0/go.mod h1:q3ZJj1GMQRdU0OCv5XXpCqfLqHHZnI5zcumkvuYDmHI=
-cloud.google.com/go/iam v0.3.0 h1:exkAomrVUuzx9kWFI1wm3KI0uoDeUFPB4kKGzx6x+Gc=
cloud.google.com/go/iam v0.3.0/go.mod h1:XzJPvDayI+9zsASAFO68Hk07u3z+f+JrT2xXNdp4bnY=
cloud.google.com/go/logging v1.4.2 h1:Mu2Q75VBDQlW1HlBMjTX4X84UFR73G1TiLlRYc/b7tA=
cloud.google.com/go/logging v1.4.2/go.mod h1:jco9QZSx8HiVVqLJReq7z7bVdj0P1Jb9PDFs63T+axo=
@@ -64,8 +63,6 @@ cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RX
cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo=
cloud.google.com/go/storage v1.22.1/go.mod h1:S8N1cAStu7BOeFfE8KAQzmyyLkK8p/vmRq6kuBTW58Y=
-cloud.google.com/go/storage v1.23.0 h1:wWRIaDURQA8xxHguFCshYepGlrWIrbBnAmc7wfg07qY=
-cloud.google.com/go/storage v1.23.0/go.mod h1:vOEEDNFnciUMhBeT6hsJIn3ieU5cFRmzeLgDvXzfIXc=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
@@ -250,11 +247,9 @@ github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8
github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg=
github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
-github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
-github.com/google/martian/v3 v3.2.1 h1:d8MncMlErDFTwQGBK1xhv026j9kqhvw1Qv9IbWT1VLQ=
github.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk=
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
@@ -287,7 +282,6 @@ github.com/googleapis/gax-go/v2 v2.2.0/go.mod h1:as02EH8zWkzwUoLbBaFeQ+arQaj/Oth
github.com/googleapis/gax-go/v2 v2.3.0/go.mod h1:b8LNqSzNabLiUpXKkY7HAR5jr6bIT99EXz9pXxye9YM=
github.com/googleapis/gax-go/v2 v2.4.0 h1:dS9eYAjhrE2RjmzYw2XAPvcXfmcQLtFEQWn0CR82awk=
github.com/googleapis/gax-go/v2 v2.4.0/go.mod h1:XOTVJ59hdnfJLIP/dh8n5CGryZR2LxK9wbMD5+iXC6c=
-github.com/googleapis/go-type-adapters v1.0.0 h1:9XdMn+d/G57qq1s8dNc5IesGCXHf6V2HZ2JwRxfA2tA=
github.com/googleapis/go-type-adapters v1.0.0/go.mod h1:zHW75FOG2aur7gAO2B+MLby+cLsWGBF62rFAi7WjWO4=
github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
@@ -910,9 +904,8 @@ google.golang.org/api v0.74.0/go.mod h1:ZpfMZOVRMywNyvJFeqL9HRWBgAuRfSjJFpe9QtRR
google.golang.org/api v0.75.0/go.mod h1:pU9QmyHLnzlpar1Mjt4IbapUCy8J+6HD6GeELN69ljA=
google.golang.org/api v0.78.0/go.mod h1:1Sg78yoMLOhlQTeF+ARBoytAcH1NNyyl390YMy6rKmw=
google.golang.org/api v0.80.0/go.mod h1:xY3nI94gbvBrE0J6NHXhxOmW97HG7Khjkku6AFB3Hyg=
+google.golang.org/api v0.84.0 h1:NMB9J4cCxs9xEm+1Z9QiO3eFvn7EnQj3Eo3hN6ugVlg=
google.golang.org/api v0.84.0/go.mod h1:NTsGnUFJMYROtiquksZHBWtHfeMC7iYthki7Eq3pa8o=
-google.golang.org/api v0.85.0 h1:8rJoHuRxx+vCmZtAO/3k1dRLvYNVyTJtZ5oaFZvhgvc=
-google.golang.org/api v0.85.0/go.mod h1:AqZf8Ep9uZ2pyTvgL+x0D3Zt0eoT9b5E8fmzfu6FO2g=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
diff --git a/playground/backend/internal/api/v1/api_grpc.pb.go b/playground/backend/internal/api/v1/api_grpc.pb.go
index 16c990d37fb..aa5bac81531 100644
--- a/playground/backend/internal/api/v1/api_grpc.pb.go
+++ b/playground/backend/internal/api/v1/api_grpc.pb.go
@@ -55,9 +55,9 @@ type PlaygroundServiceClient interface {
GetCompileOutput(ctx context.Context, in *GetCompileOutputRequest, opts ...grpc.CallOption) (*GetCompileOutputResponse, error)
// Cancel code processing
Cancel(ctx context.Context, in *CancelRequest, opts ...grpc.CallOption) (*CancelResponse, error)
- // Get all precompiled objects from the cloud storage.
+ // Get all precompiled objects from the cloud datastore.
GetPrecompiledObjects(ctx context.Context, in *GetPrecompiledObjectsRequest, opts ...grpc.CallOption) (*GetPrecompiledObjectsResponse, error)
- // Get precompiled object from the cloud storage.
+ // Get precompiled object from the cloud datastore.
GetPrecompiledObject(ctx context.Context, in *GetPrecompiledObjectRequest, opts ...grpc.CallOption) (*GetPrecompiledObjectResponse, error)
// Get the code of an PrecompiledObject.
GetPrecompiledObjectCode(ctx context.Context, in *GetPrecompiledObjectCodeRequest, opts ...grpc.CallOption) (*GetPrecompiledObjectCodeResponse, error)
@@ -278,9 +278,9 @@ type PlaygroundServiceServer interface {
GetCompileOutput(context.Context, *GetCompileOutputRequest) (*GetCompileOutputResponse, error)
// Cancel code processing
Cancel(context.Context, *CancelRequest) (*CancelResponse, error)
- // Get all precompiled objects from the cloud storage.
+ // Get all precompiled objects from the cloud datastore.
GetPrecompiledObjects(context.Context, *GetPrecompiledObjectsRequest) (*GetPrecompiledObjectsResponse, error)
- // Get precompiled object from the cloud storage.
+ // Get precompiled object from the cloud datastore.
GetPrecompiledObject(context.Context, *GetPrecompiledObjectRequest) (*GetPrecompiledObjectResponse, error)
// Get the code of an PrecompiledObject.
GetPrecompiledObjectCode(context.Context, *GetPrecompiledObjectCodeRequest) (*GetPrecompiledObjectCodeResponse, error)
diff --git a/playground/backend/internal/cloud_bucket/precompiled_objects.go b/playground/backend/internal/cloud_bucket/precompiled_objects.go
deleted file mode 100644
index 76eb6a71985..00000000000
--- a/playground/backend/internal/cloud_bucket/precompiled_objects.go
+++ /dev/null
@@ -1,433 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package cloud_bucket
-
-import (
- pb "beam.apache.org/playground/backend/internal/api/v1"
- "beam.apache.org/playground/backend/internal/logger"
- "cloud.google.com/go/storage"
- "context"
- "encoding/json"
- "fmt"
- "google.golang.org/api/iterator"
- "google.golang.org/api/option"
- "io"
- "io/ioutil"
- "os"
- "path/filepath"
- "strings"
- "time"
-)
-
-const (
- OutputExtension = "output"
- LogsExtension = "log"
- GraphExtension = "graph"
- defaultPrecompiledObjectInfo = "defaultPrecompiledObject.info"
- MetaInfoName = "meta.info"
- Timeout = time.Minute
- javaExtension = "java"
- goExtension = "go"
- pyExtension = "py"
- scioExtension = "scala"
- separatorsNumber = 3
-)
-
-type ObjectInfo struct {
- Name string
- CloudPath string
- Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"`
- Type pb.PrecompiledObjectType `protobuf:"varint,4,opt,name=type,proto3,enum=api.v1.PrecompiledObjectType" json:"type,omitempty"`
- Categories []string `json:"categories,omitempty"`
- PipelineOptions string `protobuf:"bytes,3,opt,name=pipeline_options,proto3" json:"pipeline_options,omitempty"`
- Link string `protobuf:"bytes,3,opt,name=link,proto3" json:"link,omitempty"`
- Multifile bool `protobuf:"varint,7,opt,name=multifile,proto3" json:"multifile,omitempty"`
- ContextLine int32 `protobuf:"varint,7,opt,name=context_line,proto3" json:"context_line,omitempty"`
- DefaultExample bool `protobuf:"varint,7,opt,name=default_example,json=defaultExample,proto3" json:"default_example,omitempty"`
-}
-
-type PrecompiledObjects []ObjectInfo
-type CategoryToPrecompiledObjects map[string]PrecompiledObjects
-type SdkToCategories map[string]CategoryToPrecompiledObjects
-
-// CloudStorage represents working tools for getting compiled and
-// run beam examples from Google Cloud Storage. It is required that
-// the bucket where examples are stored would be public,
-// and it has a specific structure of files, namely:
-// SDK_JAVA/
-// ----defaultPrecompiledObject.info
-// ----PRECOMPILED_OBJECT_TYPE_EXAMPLE/
-// --------MinimalWordCount/
-// ----------- MinimalWordCount.java
-// ----------- MinimalWordCount.output
-// ----------- MinimalWordCount.log
-// ----------- MinimalWordCount.graph
-// ----------- meta.info
-// --------JoinExamples/
-// ----------- JoinExamples.java
-// ----------- JoinExamples.output
-// ----------- JoinExamples.log
-// ----------- JoinExamples.graph
-// ----------- meta.info
-// ----PRECOMPILED_OBJECT_TYPE_KATA/
-// --------...
-// ----...
-// SDK_GO/
-// ----defaultPrecompiledObject.info
-// ----PRECOMPILED_OBJECT_TYPE_EXAMPLE/
-// --------MinimalWordCount/
-// ----------- MinimalWordCount.go
-// ----------- MinimalWordCount.output
-// ----------- MinimalWordCount.log
-// ----------- MinimalWordCount.graph
-// ----------- meta.info
-// --------PingPong/
-// ----PRECOMPILED_OBJECT_TYPE_KATA/
-// --------...
-// ----...
-//
-// defaultPrecompiledObject.info is a file that contains path to the default example:
-// {
-// "SDK_JAVA": "SDK_JAVA/PRECOMPILED_OBJECT_TYPE_EXAMPLE/MinimalWordCount"
-// }
-//
-// meta.info is a json file that has the following fields:
-// {
-// "name": "name of the example",
-// "description": "Description of an example",
-// "multifile": false
-// "categories": ["Common", "IO"]
-// "pipeline_options": "--key1 value1",
-// "default_example": false,
-// "context_line": 1,
-// "link": "https://github.com/apache/beam/blob/master/path/to/example"
-// }
-//
-type CloudStorage struct {
-}
-
-func New() *CloudStorage {
- return &CloudStorage{}
-}
-
-// GetPrecompiledObject returns the precompiled example
-func (cd *CloudStorage) GetPrecompiledObject(ctx context.Context, precompiledObjectPath, bucketName string) (*pb.PrecompiledObject, error) {
- cloudPath := filepath.Join(precompiledObjectPath, MetaInfoName)
- data, err := cd.getFileFromBucket(ctx, cloudPath, "", bucketName)
- if err != nil {
- return nil, err
- }
- precompiledObject := &pb.PrecompiledObject{}
- err = json.Unmarshal(data, precompiledObject)
- if err != nil {
- logger.Errorf("json.Unmarshal: %v", err.Error())
- return nil, err
- }
- precompiledObject.CloudPath = precompiledObjectPath
- return precompiledObject, nil
-}
-
-// GetPrecompiledObjectCode returns the source code of the example
-func (cd *CloudStorage) GetPrecompiledObjectCode(ctx context.Context, precompiledObjectPath, bucketName string) (string, error) {
- extension, err := getFileExtensionBySdk(precompiledObjectPath)
- if err != nil {
- return "", err
- }
- data, err := cd.getFileFromBucket(ctx, precompiledObjectPath, extension, bucketName)
- if err != nil {
- return "", err
- }
- result := string(data)
- return result, nil
-}
-
-// GetPrecompiledObjectOutput returns the run output of the example
-func (cd *CloudStorage) GetPrecompiledObjectOutput(ctx context.Context, precompiledObjectPath, bucketName string) (string, error) {
- data, err := cd.getFileFromBucket(ctx, precompiledObjectPath, OutputExtension, bucketName)
- if err != nil {
- return "", err
- }
- result := string(data)
- return result, nil
-}
-
-// GetPrecompiledObjectLogs returns the logs of the example
-func (cd *CloudStorage) GetPrecompiledObjectLogs(ctx context.Context, precompiledObjectPath, bucketName string) (string, error) {
- data, err := cd.getFileFromBucket(ctx, precompiledObjectPath, LogsExtension, bucketName)
- if err != nil {
- return "", err
- }
- result := string(data)
- return result, nil
-}
-
-// GetPrecompiledObjectGraph returns the graph of the example
-func (cd *CloudStorage) GetPrecompiledObjectGraph(ctx context.Context, precompiledObjectPath, bucketName string) (string, error) {
- data, err := cd.getFileFromBucket(ctx, precompiledObjectPath, GraphExtension, bucketName)
- if err != nil {
- return "", err
- }
- return string(data), nil
-}
-
-// GetPrecompiledObjects returns stored at the cloud storage bucket precompiled objects for the target category
-func (cd *CloudStorage) GetPrecompiledObjects(ctx context.Context, targetSdk pb.Sdk, targetCategory, bucketName string) (*SdkToCategories, error) {
- client, err := storage.NewClient(ctx, option.WithoutAuthentication())
- if err != nil {
- return nil, fmt.Errorf("storage.NewClient: %v", err)
- }
- defer client.Close()
-
- ctx, cancel := context.WithTimeout(ctx, Timeout)
- defer cancel()
-
- precompiledObjects := make(SdkToCategories, 0)
- bucket := client.Bucket(bucketName)
-
- dirs, err := cd.getPrecompiledObjectsDirs(ctx, targetSdk, bucket)
- if err != nil {
- return nil, err
- }
- metaFiles := make(map[string][]byte, 0)
- for objectDir := range dirs {
- infoPath := filepath.Join(objectDir, MetaInfoName) // helping file with information about this object
- rc, err := bucket.Object(infoPath).NewReader(ctx)
- if err != nil {
- logger.Errorf("Object(%q).NewReader: %v", infoPath, err.Error())
- continue
- }
- metaFile, err := ioutil.ReadAll(rc)
- if err != nil {
- logger.Errorf("ioutil.ReadAll: %v", err.Error())
- continue
- }
- metaFiles[objectDir] = metaFile
- rc.Close()
- }
-
- for objectDir, metaFile := range metaFiles {
- precompiledObject := ObjectInfo{}
- err = json.Unmarshal(metaFile, &precompiledObject)
- if err != nil {
- logger.Errorf("json.Unmarshal: %v", err.Error())
- continue
- }
-
- folderName := strings.Split(objectDir, string(os.PathSeparator))[1]
- precompiledObject.Type = pb.PrecompiledObjectType(pb.PrecompiledObjectType_value[folderName])
-
- for _, objectCategory := range precompiledObject.Categories {
- if targetCategory == "" || targetCategory == objectCategory { //take only requested categories
- appendPrecompiledObject(precompiledObject, &precompiledObjects, objectDir, objectCategory)
- }
- }
- }
- return &precompiledObjects, nil
-}
-
-// GetDefaultPrecompiledObjects returns the default precompiled objects
-func (cd *CloudStorage) GetDefaultPrecompiledObjects(ctx context.Context, bucketName string) (map[pb.Sdk]*pb.PrecompiledObject, error) {
- client, err := storage.NewClient(ctx, option.WithoutAuthentication())
- if err != nil {
- return nil, fmt.Errorf("storage.NewClient: %v", err)
- }
- defer client.Close()
- bucket := client.Bucket(bucketName)
-
- paths := make(map[pb.Sdk]string, 0)
- for _, sdkName := range pb.Sdk_name {
- sdk := pb.Sdk(pb.Sdk_value[sdkName])
- if sdk == pb.Sdk_SDK_UNSPECIFIED {
- continue
- }
- path, err := cd.getDefaultPrecompiledObjectsPath(ctx, bucket, sdk)
- if err != nil {
- return nil, err
- }
- paths[sdk] = path
- }
-
- defaultPrecompiledObjects := make(map[pb.Sdk]*pb.PrecompiledObject, 0)
- for sdk, path := range paths {
- infoPath := filepath.Join(path, MetaInfoName)
- rc, err := bucket.Object(infoPath).NewReader(ctx)
- if err != nil {
- logger.Errorf("Object(%q).NewReader: %v", infoPath, err.Error())
- continue
- }
- metaFile, err := ioutil.ReadAll(rc)
- if err != nil {
- logger.Errorf("ioutil.ReadAll: %v", err.Error())
- continue
- }
- rc.Close()
-
- precompiledObject := &pb.PrecompiledObject{}
- err = json.Unmarshal(metaFile, &precompiledObject)
- if err != nil {
- logger.Errorf("json.Unmarshal: %v", err.Error())
- return nil, err
- }
- precompiledObject.CloudPath = path
- defaultPrecompiledObjects[sdk] = precompiledObject
- }
- return defaultPrecompiledObjects, nil
-}
-
-// getDefaultPrecompiledObjectsPath returns path for SDK to the default precompiled object
-func (cd *CloudStorage) getDefaultPrecompiledObjectsPath(ctx context.Context, bucket *storage.BucketHandle, sdk pb.Sdk) (string, error) {
- pathToFile := fmt.Sprintf("%s/%s", sdk.String(), defaultPrecompiledObjectInfo)
- rc, err := bucket.Object(pathToFile).NewReader(ctx)
- if err != nil {
- logger.Errorf("Object(%q).NewReader: %v", pathToFile, err.Error())
- return "", err
- }
-
- data, err := io.ReadAll(rc)
- if err != nil {
- logger.Errorf("ioutil.ReadAll: %v", err.Error())
- return "", err
- }
-
- path := make(map[string]string, 0)
- if err := json.Unmarshal(data, &path); err != nil {
- return "", err
- }
- return path[sdk.String()], nil
-}
-
-// getPrecompiledObjectsDirs finds directories with precompiled objects
-// Since there is no notion of directory at cloud storage, then
-// to avoid duplicates of a base path (directory) need to store it in a set/map.
-func (cd *CloudStorage) getPrecompiledObjectsDirs(ctx context.Context, targetSdk pb.Sdk, bucket *storage.BucketHandle) (map[string]bool, error) {
- prefix := targetSdk.String()
- if targetSdk == pb.Sdk_SDK_UNSPECIFIED {
- prefix = ""
- }
- it := bucket.Objects(ctx, &storage.Query{
- Prefix: prefix,
- })
- objectDirs := make(map[string]bool, 0)
- for {
- attrs, err := it.Next()
- if err == iterator.Done {
- break
- }
- if err != nil {
- bucketAttrs, errWithAttrs := bucket.Attrs(ctx)
- if errWithAttrs != nil {
- return nil, fmt.Errorf("error during receiving bucket's attributes: %s", err)
- }
- return nil, fmt.Errorf("Bucket(%q).Objects: %v", bucketAttrs.Name, err)
- }
- path := attrs.Name
- if isPathToPrecompiledObjectFile(path) {
- objectDirs[filepath.Dir(path)] = true //save base path (directory) of a file
- }
- }
- return objectDirs, nil
-}
-
-// appendPrecompiledObject add precompiled object to the common structure of precompiled objects
-func appendPrecompiledObject(objectInfo ObjectInfo, sdkToCategories *SdkToCategories, pathToObject string, categoryName string) {
- sdkName := getSdkName(pathToObject)
- categoryToPrecompiledObjects, ok := (*sdkToCategories)[sdkName]
- if !ok {
- (*sdkToCategories)[sdkName] = make(CategoryToPrecompiledObjects, 0)
- categoryToPrecompiledObjects = (*sdkToCategories)[sdkName]
- }
- objects, ok := categoryToPrecompiledObjects[categoryName]
- if !ok {
- categoryToPrecompiledObjects[categoryName] = make(PrecompiledObjects, 0)
- objects = categoryToPrecompiledObjects[categoryName]
- }
- objectInfo.CloudPath = pathToObject
- objectInfo.Name = filepath.Base(pathToObject)
- categoryToPrecompiledObjects[categoryName] = append(objects, objectInfo)
-}
-
-// getFileFromBucket receives the file from the bucket by its name
-func (cd *CloudStorage) getFileFromBucket(ctx context.Context, pathToObject string, extension, bucketName string) ([]byte, error) {
- client, err := storage.NewClient(ctx, option.WithoutAuthentication())
- if err != nil {
- return nil, fmt.Errorf("storage.NewClient: %v", err)
- }
- defer client.Close()
-
- ctx, cancel := context.WithTimeout(ctx, Timeout)
- defer cancel()
-
- bucket := client.Bucket(bucketName)
-
- filePath := pathToObject
- if extension != "" {
- filePath = getFullFilePath(pathToObject, extension)
- }
- rc, err := bucket.Object(filePath).NewReader(ctx)
- if err != nil {
- return nil, fmt.Errorf("Object(%q).NewReader: %v", filePath, err)
- }
- defer rc.Close()
-
- data, err := ioutil.ReadAll(rc)
- if err != nil {
- return nil, fmt.Errorf("ioutil.ReadAll: %v", err)
- }
- return data, nil
-}
-
-// getFileExtensionBySdk get extension of the file with code by the sdk name
-func getFileExtensionBySdk(precompiledObjectPath string) (string, error) {
- sdk := strings.Split(precompiledObjectPath, string(os.PathSeparator))[0]
- var extension string
- switch sdk {
- case pb.Sdk_SDK_JAVA.String():
- extension = javaExtension
- case pb.Sdk_SDK_PYTHON.String():
- extension = pyExtension
- case pb.Sdk_SDK_GO.String():
- extension = goExtension
- case pb.Sdk_SDK_SCIO.String():
- extension = scioExtension
- default:
- return "", fmt.Errorf("")
- }
- return extension, nil
-}
-
-// getFullFilePath get full path to the precompiled object file
-func getFullFilePath(objectDir string, extension string) string {
- precompiledObjectName := filepath.Base(objectDir) //the base of the object's directory matches the name of the file
- fileName := strings.Join([]string{precompiledObjectName, extension}, ".")
- filePath := filepath.Join(objectDir, fileName)
- return filePath
-}
-
-// isPathToPrecompiledObjectFile is it a path where precompiled object is stored (i.e. SDK/ObjectType/ObjectName/ObjectCode.sdkExtension)
-func isPathToPrecompiledObjectFile(path string) bool {
- return strings.Count(path, string(os.PathSeparator)) == separatorsNumber && !isDir(path)
-}
-
-// isDir checks whether the path imitates directory
-func isDir(path string) bool {
- return path[len(path)-1] == os.PathSeparator
-}
-
-// getSdkName gets category and sdk from the filepath
-func getSdkName(path string) string {
- sdkName := strings.Split(path, string(os.PathSeparator))[0] // the path of the form "sdkName/example/", where the first part is sdkName
- return sdkName
-}
diff --git a/playground/backend/internal/cloud_bucket/precompiled_objects_test.go b/playground/backend/internal/cloud_bucket/precompiled_objects_test.go
deleted file mode 100644
index ece483443b6..00000000000
--- a/playground/backend/internal/cloud_bucket/precompiled_objects_test.go
+++ /dev/null
@@ -1,264 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package cloud_bucket
-
-import (
- pb "beam.apache.org/playground/backend/internal/api/v1"
- "context"
- "reflect"
- "testing"
-)
-
-const (
- precompiledObjectPath = "SDK_JAVA/PRECOMPILED_OBJECT_TYPE_EXAMPLE/MinimalWordCount"
- targetSdk = pb.Sdk_SDK_UNSPECIFIED
- defaultBucketName = "playground-precompiled-objects"
-)
-
-var bucket *CloudStorage
-var ctx context.Context
-
-func init() {
- bucket = New()
- ctx = context.Background()
-}
-
-func Test_getFullFilePath(t *testing.T) {
- type args struct {
- examplePath string
- extension string
- }
- tests := []struct {
- name string
- args args
- want string
- }{
- {
- // Try to get the full path to the code of the precompiled example
- // by the path to its directory on Cloud Storage:
- // (SDK_JAVA/HelloWorld, java) -> SDK_JAVA/HelloWorld/HelloWorld.java
- name: "Test getFullFilePath()",
- args: args{
- examplePath: "SDK_JAVA/HelloWorld",
- extension: "java",
- },
- want: "SDK_JAVA/HelloWorld/HelloWorld.java",
- },
- }
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- if got := getFullFilePath(tt.args.examplePath, tt.args.extension); got != tt.want {
- t.Errorf("getFullFilePath() = %v, want %v", got, tt.want)
- }
- })
- }
-}
-
-func Test_getSdkName(t *testing.T) {
- type args struct {
- path string
- }
- tests := []struct {
- name string
- args args
- want string
- }{
- {
- // Try to get the name of the SDK from the path
- name: "Test getSdkName",
- args: args{path: "SDK_JAVA/HelloWorld"},
- want: "SDK_JAVA",
- },
- }
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- if got := getSdkName(tt.args.path); got != tt.want {
- t.Errorf("getSdkName() = %v, want %v", got, tt.want)
- }
- })
- }
-}
-
-func Test_isDir(t *testing.T) {
- type args struct {
- path string
- }
- tests := []struct {
- name string
- args args
- want bool
- }{
- {
- name: "Test isDir if it is a directory",
- args: args{path: "SDK_JAVA/HelloWorld/"},
- want: true,
- },
- {
- name: "Test isDir if it is a file",
- args: args{path: "SDK_JAVA/HelloWorld/HelloWorld.java"},
- want: false,
- },
- }
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- if got := isDir(tt.args.path); got != tt.want {
- t.Errorf("isDir() = %v, want %v", got, tt.want)
- }
- })
- }
-}
-
-func Test_isPathToPrecompiledObjectFile(t *testing.T) {
- type args struct {
- path string
- }
- tests := []struct {
- name string
- args args
- want bool
- }{
- {
- name: "Test if path is valid",
- args: args{path: "SDK_JAVA/PRECOMPILED_OBJECT_TYPE_EXAMPLE/HelloWorld/HelloWorld.java"},
- want: true,
- },
- {
- name: "Test if path is not valid",
- args: args{path: "SDK_JAVA/PRECOMPILED_OBJECT_TYPE_EXAMPLE/HelloWorld/"},
- want: false,
- },
- }
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- if got := isPathToPrecompiledObjectFile(tt.args.path); got != tt.want {
- t.Errorf("isPathToPrecompiledObjectFile() = %v, want %v", got, tt.want)
- }
- })
- }
-}
-
-func Test_appendPrecompiledObject(t *testing.T) {
- type args struct {
- objectInfo ObjectInfo
- sdkToCategories *SdkToCategories
- pathToObject string
- categoryName string
- }
- tests := []struct {
- name string
- args args
- want *SdkToCategories
- }{
- {
- name: "Test append new objects",
- args: args{
- objectInfo: ObjectInfo{
- Name: "",
- CloudPath: "",
- Description: "",
- Type: 0,
- Categories: []string{"Common"},
- PipelineOptions: "",
- },
- sdkToCategories: &SdkToCategories{},
- pathToObject: "SDK_JAVA/HelloWorld",
- categoryName: "Common",
- },
- want: &SdkToCategories{"SDK_JAVA": CategoryToPrecompiledObjects{"Common": PrecompiledObjects{ObjectInfo{
- Name: "HelloWorld",
- CloudPath: "SDK_JAVA/HelloWorld",
- Description: "",
- Type: 0,
- Categories: []string{"Common"},
- PipelineOptions: "",
- }}}},
- },
- }
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- appendPrecompiledObject(tt.args.objectInfo, tt.args.sdkToCategories, tt.args.pathToObject, tt.args.categoryName)
- got := tt.args.sdkToCategories
- if !reflect.DeepEqual(got, tt.want) {
- t.Errorf("appendPrecompiledObject() got = %v, want %v", got, tt.want)
- }
- })
- }
-}
-
-func Test_getFileExtensionBySdk(t *testing.T) {
- type args struct {
- precompiledObjectPath string
- }
- tests := []struct {
- name string
- args args
- want string
- wantErr bool
- }{
- {
- // Try to get an extension of a file by the sdk at file path:
- // SDK_JAVA/HelloWorld -> java
- name: "Test getFileExtensionBySdk() valid sdk",
- args: args{precompiledObjectPath: "SDK_JAVA/HelloWorld"},
- want: "java",
- wantErr: false,
- },
- {
- // Try to get an error if sdk is not a valid one:
- // INVALID_SDK/HelloWorld -> ""
- name: "Test getFileExtensionBySdk() invalid sdk",
- args: args{precompiledObjectPath: "INVALID_SDK/HelloWorld"},
- want: "",
- wantErr: true,
- },
- }
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- got, err := getFileExtensionBySdk(tt.args.precompiledObjectPath)
- if (err != nil) != tt.wantErr {
- t.Errorf("getFileExtensionBySdk() error = %v, wantErr %v", err, tt.wantErr)
- return
- }
- if got != tt.want {
- t.Errorf("getFileExtensionBySdk() got = %v, want %v", got, tt.want)
- }
- })
- }
-}
-
-func Benchmark_GetPrecompiledObjects(b *testing.B) {
- for i := 0; i < b.N; i++ {
- _, _ = bucket.GetPrecompiledObjects(ctx, targetSdk, "", defaultBucketName)
- }
-}
-
-func Benchmark_GetPrecompiledObjectOutput(b *testing.B) {
- for i := 0; i < b.N; i++ {
- _, _ = bucket.GetPrecompiledObjectOutput(ctx, precompiledObjectPath, defaultBucketName)
- }
-}
-
-func Benchmark_GetPrecompiledObjectCode(b *testing.B) {
- for i := 0; i < b.N; i++ {
- _, _ = bucket.GetPrecompiledObjectCode(ctx, precompiledObjectPath, defaultBucketName)
- }
-}
-
-func Benchmark_GetPrecompiledObject(b *testing.B) {
- for i := 0; i < b.N; i++ {
- _, _ = bucket.GetPrecompiledObject(ctx, precompiledObjectPath, defaultBucketName)
- }
-}
diff --git a/playground/backend/internal/db/entity/snippet.go b/playground/backend/internal/db/entity/snippet.go
index f95eeb04b99..9ad85ac24f0 100644
--- a/playground/backend/internal/db/entity/snippet.go
+++ b/playground/backend/internal/db/entity/snippet.go
@@ -57,27 +57,31 @@ type Snippet struct {
// ID generates id according to content of the entity
func (s *Snippet) ID() (string, error) {
+ id, err := generateIDBasedOnContent(s.Salt, combineUniqueSnippetContent(s), s.IdLength)
+ if err != nil {
+ return "", err
+ }
+ return id, nil
+}
+
+func combineUniqueSnippetContent(snippet *Snippet) string {
var files []string
- for _, v := range s.Files {
- files = append(files, strings.TrimSpace(v.Content)+strings.TrimSpace(v.Name))
+ for _, file := range snippet.Files {
+ files = append(files, strings.TrimSpace(file.Content)+strings.TrimSpace(file.Name))
}
sort.Strings(files)
var contentBuilder strings.Builder
for i, file := range files {
contentBuilder.WriteString(file)
if i == len(files)-1 {
- contentBuilder.WriteString(fmt.Sprintf("%v%s", s.Snippet.Sdk, strings.TrimSpace(s.Snippet.PipeOpts)))
+ contentBuilder.WriteString(fmt.Sprintf("%v%s", snippet.Snippet.Sdk, strings.TrimSpace(snippet.Snippet.PipeOpts)))
}
}
- id, err := generateID(s.Salt, contentBuilder.String(), s.IdLength)
- if err != nil {
- return "", err
- }
- return id, nil
+
+ return contentBuilder.String()
}
-//TODO after removing the cloud storage this method should be deleted. It's a duplicate code from utils package
-func generateID(salt, content string, length int8) (string, error) {
+func generateIDBasedOnContent(salt, content string, length int8) (string, error) {
hash := sha256.New()
if _, err := io.WriteString(hash, salt); err != nil {
logger.Errorf("ID(): error during hash generation: %s", err.Error())
diff --git a/playground/backend/internal/db/mapper/datastore_mapper_test.go b/playground/backend/internal/db/mapper/datastore_mapper_test.go
index d44a1586de4..11796de6ac6 100644
--- a/playground/backend/internal/db/mapper/datastore_mapper_test.go
+++ b/playground/backend/internal/db/mapper/datastore_mapper_test.go
@@ -31,7 +31,7 @@ var testable *DatastoreMapper
var datastoreMapperCtx = context.Background()
func TestMain(m *testing.M) {
- appEnv := environment.NewApplicationEnvs("/app", "", "", "", "", "", "../../../.", nil, 0)
+ appEnv := environment.NewApplicationEnvs("/app", "", "", "", "", "../../../.", nil, 0)
appEnv.SetSchemaVersion("MOCK_SCHEMA")
props, _ := environment.NewProperties(appEnv.PropertyPath())
testable = NewDatastoreMapper(datastoreMapperCtx, appEnv, props)
diff --git a/playground/backend/internal/db/schema/migration/migration_v001_test.go b/playground/backend/internal/db/schema/migration/migration_v001_test.go
index 2fc1aa78d52..24b35403cf9 100644
--- a/playground/backend/internal/db/schema/migration/migration_v001_test.go
+++ b/playground/backend/internal/db/schema/migration/migration_v001_test.go
@@ -60,7 +60,7 @@ func teardown() {
}
func TestInitialStructure_InitiateData(t *testing.T) {
- appEnvs := environment.NewApplicationEnvs("/app", "", "", "", "", "../../../../../sdks-emulator.yaml", "../../../../.", nil, 0)
+ appEnvs := environment.NewApplicationEnvs("/app", "", "", "", "../../../../../sdks-emulator.yaml", "../../../../.", nil, 0)
props, err := environment.NewProperties(appEnvs.PropertyPath())
if err != nil {
t.Errorf("InitiateData(): error during properties initialization, err: %s", err.Error())
diff --git a/playground/backend/internal/environment/application.go b/playground/backend/internal/environment/application.go
index 9cbb764cf13..b32a41a8e69 100644
--- a/playground/backend/internal/environment/application.go
+++ b/playground/backend/internal/environment/application.go
@@ -100,9 +100,6 @@ type ApplicationEnvs struct {
// pipelinesFolder is name of folder in which the pipelines resources are stored
pipelinesFolder string
- // bucketName is a name of the GCS's bucket with examples
- bucketName string
-
// schemaVersion is the database schema version
schemaVersion string
@@ -115,7 +112,7 @@ type ApplicationEnvs struct {
// NewApplicationEnvs constructor for ApplicationEnvs
func NewApplicationEnvs(
- workingDir, launchSite, projectId, pipelinesFolder, bucketName, sdkConfigPath, propertyPath string,
+ workingDir, launchSite, projectId, pipelinesFolder, sdkConfigPath, propertyPath string,
cacheEnvs *CacheEnvs,
pipelineExecuteTimeout time.Duration,
) *ApplicationEnvs {
@@ -126,7 +123,6 @@ func NewApplicationEnvs(
launchSite: launchSite,
projectId: projectId,
pipelinesFolder: pipelinesFolder,
- bucketName: bucketName,
sdkConfigPath: sdkConfigPath,
propertyPath: propertyPath,
}
@@ -162,11 +158,6 @@ func (ae *ApplicationEnvs) PipelinesFolder() string {
return ae.pipelinesFolder
}
-// BucketName returns name of the GCS's bucket with examples
-func (ae *ApplicationEnvs) BucketName() string {
- return ae.bucketName
-}
-
// SchemaVersion returns the database schema version
func (ae *ApplicationEnvs) SchemaVersion() string {
return ae.schemaVersion
diff --git a/playground/backend/internal/environment/environment_service.go b/playground/backend/internal/environment/environment_service.go
index 74b90d98ca9..0cbf8149d5e 100644
--- a/playground/backend/internal/environment/environment_service.go
+++ b/playground/backend/internal/environment/environment_service.go
@@ -16,8 +16,6 @@
package environment
import (
- pb "beam.apache.org/playground/backend/internal/api/v1"
- "beam.apache.org/playground/backend/internal/logger"
"encoding/json"
"errors"
"fmt"
@@ -28,6 +26,9 @@ import (
"strconv"
"strings"
"time"
+
+ pb "beam.apache.org/playground/backend/internal/api/v1"
+ "beam.apache.org/playground/backend/internal/logger"
)
const (
@@ -60,8 +61,6 @@ const (
jsonExt = ".json"
configFolderName = "configs"
defaultNumOfParallelJobs = 20
- bucketNameKey = "BUCKET_NAME"
- defaultBucketName = "playground-precompiled-objects"
SDKConfigPathKey = "SDK_CONFIG"
defaultSDKConfigPath = "../sdks.yaml"
propertyPathKey = "PROPERTY_PATH"
@@ -107,7 +106,6 @@ func GetApplicationEnvsFromOsEnvs() (*ApplicationEnvs, error) {
launchSite := getEnv(launchSiteKey, defaultLaunchSite)
projectId := os.Getenv(projectIdKey)
pipelinesFolder := getEnv(pipelinesFolderKey, defaultPipelinesFolder)
- bucketName := getEnv(bucketNameKey, defaultBucketName)
sdkConfigPath := getEnv(SDKConfigPathKey, defaultSDKConfigPath)
propertyPath := getEnv(propertyPathKey, defaultPropertyPath)
@@ -127,7 +125,7 @@ func GetApplicationEnvsFromOsEnvs() (*ApplicationEnvs, error) {
}
if value, present := os.LookupEnv(workingDirKey); present {
- return NewApplicationEnvs(value, launchSite, projectId, pipelinesFolder, bucketName, sdkConfigPath, propertyPath, NewCacheEnvs(cacheType, cacheAddress, cacheExpirationTime), pipelineExecuteTimeout), nil
+ return NewApplicationEnvs(value, launchSite, projectId, pipelinesFolder, sdkConfigPath, propertyPath, NewCacheEnvs(cacheType, cacheAddress, cacheExpirationTime), pipelineExecuteTimeout), nil
}
return nil, errors.New("APP_WORK_DIR env should be provided with os.env")
}
diff --git a/playground/backend/internal/environment/environment_service_test.go b/playground/backend/internal/environment/environment_service_test.go
index f6c893f417c..04eb13d4b98 100644
--- a/playground/backend/internal/environment/environment_service_test.go
+++ b/playground/backend/internal/environment/environment_service_test.go
@@ -16,7 +16,6 @@
package environment
import (
- pb "beam.apache.org/playground/backend/internal/api/v1"
"fmt"
"io/fs"
"os"
@@ -24,6 +23,8 @@ import (
"reflect"
"testing"
"time"
+
+ pb "beam.apache.org/playground/backend/internal/api/v1"
)
const (
@@ -104,7 +105,7 @@ func TestNewEnvironment(t *testing.T) {
{name: "Create env service with default envs", want: &Environment{
NetworkEnvs: *NewNetworkEnvs(defaultIp, defaultPort, defaultProtocol),
BeamSdkEnvs: *NewBeamEnvs(defaultSdk, executorConfig, preparedModDir, 0),
- ApplicationEnvs: *NewApplicationEnvs("/app", defaultLaunchSite, defaultProjectId, defaultPipelinesFolder, defaultBucketName, defaultSDKConfigPath, defaultPropertyPath, &CacheEnvs{defaultCacheType, defaultCacheAddress, defaultCacheKeyExpirationTime}, defaultPipelineExecuteTimeout),
+ ApplicationEnvs: *NewApplicationEnvs("/app", defaultLaunchSite, defaultProjectId, defaultPipelinesFolder, defaultSDKConfigPath, defaultPropertyPath, &CacheEnvs{defaultCacheType, defaultCacheAddress, defaultCacheKeyExpirationTime}, defaultPipelineExecuteTimeout),
}},
}
for _, tt := range tests {
@@ -112,7 +113,7 @@ func TestNewEnvironment(t *testing.T) {
if got := NewEnvironment(
*NewNetworkEnvs(defaultIp, defaultPort, defaultProtocol),
*NewBeamEnvs(defaultSdk, executorConfig, preparedModDir, 0),
- *NewApplicationEnvs("/app", defaultLaunchSite, defaultProjectId, defaultPipelinesFolder, defaultBucketName, defaultSDKConfigPath, defaultPropertyPath, &CacheEnvs{defaultCacheType, defaultCacheAddress, defaultCacheKeyExpirationTime}, defaultPipelineExecuteTimeout)); !reflect.DeepEqual(got, tt.want) {
+ *NewApplicationEnvs("/app", defaultLaunchSite, defaultProjectId, defaultPipelinesFolder, defaultSDKConfigPath, defaultPropertyPath, &CacheEnvs{defaultCacheType, defaultCacheAddress, defaultCacheKeyExpirationTime}, defaultPipelineExecuteTimeout)); !reflect.DeepEqual(got, tt.want) {
t.Errorf("NewEnvironment() = %v, want %v", got, tt.want)
}
})
@@ -223,7 +224,7 @@ func Test_getApplicationEnvsFromOsEnvs(t *testing.T) {
}{
{
name: "Working dir is provided",
- want: NewApplicationEnvs("/app", defaultLaunchSite, defaultProjectId, defaultPipelinesFolder, defaultBucketName, defaultSDKConfigPath, defaultPropertyPath, &CacheEnvs{defaultCacheType, defaultCacheAddress, defaultCacheKeyExpirationTime}, defaultPipelineExecuteTimeout),
+ want: NewApplicationEnvs("/app", defaultLaunchSite, defaultProjectId, defaultPipelinesFolder, defaultSDKConfigPath, defaultPropertyPath, &CacheEnvs{defaultCacheType, defaultCacheAddress, defaultCacheKeyExpirationTime}, defaultPipelineExecuteTimeout),
wantErr: false,
envsToSet: map[string]string{workingDirKey: "/app", launchSiteKey: defaultLaunchSite, projectIdKey: defaultProjectId},
},
@@ -234,25 +235,25 @@ func Test_getApplicationEnvsFromOsEnvs(t *testing.T) {
},
{
name: "CacheKeyExpirationTimeKey is set with the correct value",
- want: NewApplicationEnvs("/app", defaultLaunchSite, defaultProjectId, defaultPipelinesFolder, defaultBucketName, defaultSDKConfigPath, defaultPropertyPath, &CacheEnvs{defaultCacheType, defaultCacheAddress, convertedTime}, defaultPipelineExecuteTimeout),
+ want: NewApplicationEnvs("/app", defaultLaunchSite, defaultProjectId, defaultPipelinesFolder, defaultSDKConfigPath, defaultPropertyPath, &CacheEnvs{defaultCacheType, defaultCacheAddress, convertedTime}, defaultPipelineExecuteTimeout),
wantErr: false,
envsToSet: map[string]string{workingDirKey: "/app", cacheKeyExpirationTimeKey: hour},
},
{
name: "CacheKeyExpirationTimeKey is set with the incorrect value",
- want: NewApplicationEnvs("/app", defaultLaunchSite, defaultProjectId, defaultPipelinesFolder, defaultBucketName, defaultSDKConfigPath, defaultPropertyPath, &CacheEnvs{defaultCacheType, defaultCacheAddress, defaultCacheKeyExpirationTime}, defaultPipelineExecuteTimeout),
+ want: NewApplicationEnvs("/app", defaultLaunchSite, defaultProjectId, defaultPipelinesFolder, defaultSDKConfigPath, defaultPropertyPath, &CacheEnvs{defaultCacheType, defaultCacheAddress, defaultCacheKeyExpirationTime}, defaultPipelineExecuteTimeout),
wantErr: false,
envsToSet: map[string]string{workingDirKey: "/app", cacheKeyExpirationTimeKey: "1"},
},
{
name: "CacheKeyExpirationTimeKey is set with the correct value",
- want: NewApplicationEnvs("/app", defaultLaunchSite, defaultProjectId, defaultPipelinesFolder, defaultBucketName, defaultSDKConfigPath, defaultPropertyPath, &CacheEnvs{defaultCacheType, defaultCacheAddress, defaultCacheKeyExpirationTime}, convertedTime),
+ want: NewApplicationEnvs("/app", defaultLaunchSite, defaultProjectId, defaultPipelinesFolder, defaultSDKConfigPath, defaultPropertyPath, &CacheEnvs{defaultCacheType, defaultCacheAddress, defaultCacheKeyExpirationTime}, convertedTime),
wantErr: false,
envsToSet: map[string]string{workingDirKey: "/app", pipelineExecuteTimeoutKey: hour},
},
{
name: "PipelineExecuteTimeoutKey is set with the incorrect value",
- want: NewApplicationEnvs("/app", defaultLaunchSite, defaultProjectId, defaultPipelinesFolder, defaultBucketName, defaultSDKConfigPath, defaultPropertyPath, &CacheEnvs{defaultCacheType, defaultCacheAddress, defaultCacheKeyExpirationTime}, defaultPipelineExecuteTimeout),
+ want: NewApplicationEnvs("/app", defaultLaunchSite, defaultProjectId, defaultPipelinesFolder, defaultSDKConfigPath, defaultPropertyPath, &CacheEnvs{defaultCacheType, defaultCacheAddress, defaultCacheKeyExpirationTime}, defaultPipelineExecuteTimeout),
wantErr: false,
envsToSet: map[string]string{workingDirKey: "/app", pipelineExecuteTimeoutKey: "1"},
},
diff --git a/playground/backend/internal/utils/datastore_utils.go b/playground/backend/internal/utils/datastore_utils.go
index 89f980fc163..120b126d9c3 100644
--- a/playground/backend/internal/utils/datastore_utils.go
+++ b/playground/backend/internal/utils/datastore_utils.go
@@ -17,10 +17,7 @@ package utils
import (
"context"
- "crypto/sha256"
- "encoding/base64"
"fmt"
- "io"
"os"
"strconv"
"strings"
@@ -28,27 +25,9 @@ import (
"cloud.google.com/go/datastore"
"beam.apache.org/playground/backend/internal/constants"
- "beam.apache.org/playground/backend/internal/errors"
"beam.apache.org/playground/backend/internal/logger"
)
-func ID(salt, content string, length int8) (string, error) {
- hash := sha256.New()
- if _, err := io.WriteString(hash, salt); err != nil {
- logger.Errorf("ID(): error during hash generation: %s", err.Error())
- return "", errors.InternalError("Error during hash generation", "Error writing hash and salt")
- }
- hash.Write([]byte(content))
- sum := hash.Sum(nil)
- b := make([]byte, base64.URLEncoding.EncodedLen(len(sum)))
- base64.URLEncoding.Encode(b, sum)
- hashLen := int(length)
- for hashLen <= len(b) && b[hashLen-1] == '_' {
- hashLen++
- }
- return string(b)[:hashLen], nil
-}
-
func GetExampleKey(ctx context.Context, values ...interface{}) *datastore.Key {
id := GetIDWithDelimiter(values...)
return getNameKey(ctx, constants.ExampleKind, id, nil)
diff --git a/playground/backend/internal/utils/datastore_utils_test.go b/playground/backend/internal/utils/datastore_utils_test.go
index 2f7bb444033..e6113d7b4b7 100644
--- a/playground/backend/internal/utils/datastore_utils_test.go
+++ b/playground/backend/internal/utils/datastore_utils_test.go
@@ -24,42 +24,6 @@ import (
var ctx = context.Background()
-func TestID(t *testing.T) {
- type args struct {
- salt string
- content string
- length int8
- }
- tests := []struct {
- name string
- args args
- want string
- wantErr bool
- }{
- {
- name: "ID generation in the usual case",
- args: args{
- salt: "MOCK_SALT",
- content: "MOCK_CONTENT",
- length: 11,
- },
- want: "Zl_s-8seE6k",
- wantErr: false,
- },
- }
-
- for _, tt := range tests {
- result, err := ID(tt.args.salt, tt.args.content, tt.args.length)
- if (err != nil) != tt.wantErr {
- t.Errorf("ID() error = %v, wantErr %v", err, tt.wantErr)
- return
- }
- if result != tt.want {
- t.Errorf("ID() result = %v, want %v", result, tt.want)
- }
- }
-}
-
func TestGetExampleKey(t *testing.T) {
exampleKey := GetExampleKey(ctx, "MOCK_ID")
if exampleKey.Namespace != constants.Namespace ||
diff --git a/playground/backend/internal/utils/precompiled_objects_utils.go b/playground/backend/internal/utils/precompiled_objects_utils.go
index 2b779ab5878..3683a2ec44b 100644
--- a/playground/backend/internal/utils/precompiled_objects_utils.go
+++ b/playground/backend/internal/utils/precompiled_objects_utils.go
@@ -17,54 +17,8 @@ package utils
import (
pb "beam.apache.org/playground/backend/internal/api/v1"
- "beam.apache.org/playground/backend/internal/cache"
- "beam.apache.org/playground/backend/internal/cloud_bucket"
- "beam.apache.org/playground/backend/internal/logger"
- "context"
- "fmt"
)
-// PutPrecompiledObjectsToCategory adds categories with precompiled objects to protobuf object
-func PutPrecompiledObjectsToCategory(categoryName string, precompiledObjects *cloud_bucket.PrecompiledObjects, sdkCategory *pb.Categories) {
- category := pb.Categories_Category{
- CategoryName: categoryName,
- PrecompiledObjects: make([]*pb.PrecompiledObject, 0),
- }
- for _, object := range *precompiledObjects {
- category.PrecompiledObjects = append(category.PrecompiledObjects, &pb.PrecompiledObject{
- CloudPath: object.CloudPath,
- Name: object.Name,
- Description: object.Description,
- Type: object.Type,
- PipelineOptions: object.PipelineOptions,
- Link: object.Link,
- Multifile: object.Multifile,
- ContextLine: object.ContextLine,
- DefaultExample: object.DefaultExample,
- })
- }
- sdkCategory.Categories = append(sdkCategory.Categories, &category)
-}
-
-// GetCatalogFromStorage returns the precompiled objects catalog from the cloud storage
-func GetCatalogFromStorage(ctx context.Context, bucketName string) ([]*pb.Categories, error) {
- bucket := cloud_bucket.New()
- sdkToCategories, err := bucket.GetPrecompiledObjects(ctx, pb.Sdk_SDK_UNSPECIFIED, "", bucketName)
- if err != nil {
- logger.Errorf("GetPrecompiledObjects(): cloud storage error: %s", err.Error())
- return nil, err
- }
- sdkCategories := make([]*pb.Categories, 0)
- for sdkName, categories := range *sdkToCategories {
- sdkCategory := pb.Categories{Sdk: pb.Sdk(pb.Sdk_value[sdkName]), Categories: make([]*pb.Categories_Category, 0)}
- for categoryName, precompiledObjects := range categories {
- PutPrecompiledObjectsToCategory(categoryName, &precompiledObjects, &sdkCategory)
- }
- sdkCategories = append(sdkCategories, &sdkCategory)
- }
- return sdkCategories, nil
-}
-
// FilterCatalog returns the catalog filtered by sdk and categoryName
func FilterCatalog(catalog []*pb.Categories, sdk pb.Sdk, categoryName string) []*pb.Categories {
var result []*pb.Categories
@@ -96,44 +50,3 @@ func FilterCatalog(catalog []*pb.Categories, sdk pb.Sdk, categoryName string) []
}
return result
}
-
-// GetDefaultPrecompiledObject returns the default precompiled objects from cache for sdk
-func GetDefaultPrecompiledObject(ctx context.Context, sdk pb.Sdk, cacheService cache.Cache, bucketName string) (*pb.PrecompiledObject, error) {
- precompiledObject, err := cacheService.GetDefaultPrecompiledObject(ctx, sdk)
- if err != nil {
- logger.Errorf("GetDefaultPrecompiledObject(): error during getting default precompiled object %s", err.Error())
- bucket := cloud_bucket.New()
- defaultPrecompiledObjects, err := bucket.GetDefaultPrecompiledObjects(ctx, bucketName)
- if err != nil {
- return nil, err
- }
- for sdk, precompiledObject := range defaultPrecompiledObjects {
- if err := cacheService.SetDefaultPrecompiledObject(ctx, sdk, precompiledObject); err != nil {
- logger.Errorf("GetPrecompiledObjects(): cache error: %s", err.Error())
- }
- }
- precompiledObject, ok := defaultPrecompiledObjects[sdk]
- if !ok {
- return nil, fmt.Errorf("no default precompiled object found for this sdk: %s", sdk)
- }
- return precompiledObject, nil
- }
- return precompiledObject, nil
-}
-
-// GetCatalogFromCacheOrStorage returns the precompiled objects catalog from cache
-// - If there is no catalog in the cache, gets the catalog from the Storage and saves it to the cache
-func GetCatalogFromCacheOrStorage(ctx context.Context, cacheService cache.Cache, bucketName string) ([]*pb.Categories, error) {
- catalog, err := cacheService.GetCatalog(ctx)
- if err != nil {
- logger.Errorf("GetCatalog(): cache error: %s", err.Error())
- catalog, err = GetCatalogFromStorage(ctx, bucketName)
- if err != nil {
- return nil, err
- }
- if err = cacheService.SetCatalog(ctx, catalog); err != nil {
- logger.Errorf("SetCatalog(): cache error: %s", err.Error())
- }
- }
- return catalog, nil
-}
diff --git a/playground/backend/internal/utils/precompiled_objects_utils_test.go b/playground/backend/internal/utils/precompiled_objects_utils_test.go
index f8cbe93e2af..4f1e2ee30c1 100644
--- a/playground/backend/internal/utils/precompiled_objects_utils_test.go
+++ b/playground/backend/internal/utils/precompiled_objects_utils_test.go
@@ -16,68 +16,11 @@
package utils
import (
- pb "beam.apache.org/playground/backend/internal/api/v1"
- "beam.apache.org/playground/backend/internal/cache"
- "beam.apache.org/playground/backend/internal/cache/local"
- "beam.apache.org/playground/backend/internal/cloud_bucket"
- "beam.apache.org/playground/backend/internal/logger"
- "context"
"reflect"
"testing"
-)
-func TestPutPrecompiledObjectsToCategory(t *testing.T) {
- precompiledObjectToAdd := &cloud_bucket.PrecompiledObjects{
- {"TestName", "SDK_JAVA/TestCategory/TestName.java", "TestDescription", pb.PrecompiledObjectType_PRECOMPILED_OBJECT_TYPE_EXAMPLE, []string{""}, "", "", false, 1, false},
- }
- type args struct {
- categoryName string
- precompiledObjects *cloud_bucket.PrecompiledObjects
- sdkCategory *pb.Categories
- }
- tests := []struct {
- name string
- args args
- want *pb.Categories
- }{
- {
- name: "Test PutPrecompiledObjectsToCategory",
- args: args{
- categoryName: "TestCategory",
- precompiledObjects: precompiledObjectToAdd,
- sdkCategory: &pb.Categories{
- Sdk: pb.Sdk_SDK_JAVA,
- Categories: []*pb.Categories_Category{},
- },
- },
- want: &pb.Categories{
- Sdk: pb.Sdk_SDK_JAVA,
- Categories: []*pb.Categories_Category{
- {
- CategoryName: "TestCategory", PrecompiledObjects: []*pb.PrecompiledObject{
- {
- CloudPath: "SDK_JAVA/TestCategory/TestName.java",
- Name: "TestName",
- Description: "TestDescription",
- Type: pb.PrecompiledObjectType_PRECOMPILED_OBJECT_TYPE_EXAMPLE,
- ContextLine: 1,
- },
- },
- },
- },
- },
- },
- }
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- PutPrecompiledObjectsToCategory(tt.args.categoryName, tt.args.precompiledObjects, tt.args.sdkCategory)
- got := tt.args.sdkCategory
- if !reflect.DeepEqual(got, tt.want) {
- t.Errorf("PutPrecompiledObjectsToCategory() got = %v, want %v", got, tt.want)
- }
- })
- }
-}
+ pb "beam.apache.org/playground/backend/internal/api/v1"
+)
func TestFilterPrecompiledObjects(t *testing.T) {
catalog := []*pb.Categories{
@@ -218,67 +161,3 @@ func TestFilterPrecompiledObjects(t *testing.T) {
})
}
}
-
-func TestGetDefaultPrecompiledObject(t *testing.T) {
- ctx := context.Background()
- cacheService := local.New(ctx)
- defaultPrecompiledObject := &pb.PrecompiledObject{
- CloudPath: "cloudPath",
- Name: "Name",
- Description: "Description",
- Type: pb.PrecompiledObjectType_PRECOMPILED_OBJECT_TYPE_EXAMPLE,
- PipelineOptions: "--key value",
- Link: "Link",
- ContextLine: 1,
- DefaultExample: true,
- }
- err := cacheService.SetDefaultPrecompiledObject(ctx, pb.Sdk_SDK_JAVA, defaultPrecompiledObject)
- if err != nil {
- logger.Errorf("Error during set up test")
- }
-
- type args struct {
- ctx context.Context
- sdk pb.Sdk
- cacheService cache.Cache
- }
- tests := []struct {
- name string
- args args
- want *pb.PrecompiledObject
- wantErr bool
- }{
- {
- name: "there is default precompiled object",
- args: args{
- ctx: ctx,
- sdk: pb.Sdk_SDK_JAVA,
- cacheService: cacheService,
- },
- want: defaultPrecompiledObject,
- wantErr: false,
- },
- {
- name: "there is no default precompiled object",
- args: args{
- ctx: ctx,
- sdk: pb.Sdk_SDK_UNSPECIFIED,
- cacheService: cacheService,
- },
- want: nil,
- wantErr: true,
- },
- }
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- got, err := GetDefaultPrecompiledObject(tt.args.ctx, tt.args.sdk, tt.args.cacheService, "")
- if (err != nil) != tt.wantErr {
- t.Errorf("GetDefaultPrecompiledObject() error = %v, wantErr %v", err, tt.wantErr)
- return
- }
- if !reflect.DeepEqual(got, tt.want) {
- t.Errorf("GetDefaultPrecompiledObject() got = %v, want %v", got, tt.want)
- }
- })
- }
-}
diff --git a/playground/infrastructure/proxy/allow_list.py b/playground/infrastructure/proxy/allow_list.py
index f0f3d0471dd..1ef52f73b2a 100644
--- a/playground/infrastructure/proxy/allow_list.py
+++ b/playground/infrastructure/proxy/allow_list.py
@@ -29,5 +29,4 @@ ALLOWED_BUCKET_LIST = [
"dataflow-samples",
"beam-samples",
"apache-beam-samples",
- "playground-precompiled-objects",
]