Commit d13af7f
chore: add private endpoint support in bigquery example
metaclips committed Sep 6, 2024 · 1 parent a838cb7
Showing 5 changed files with 80 additions and 27 deletions.
@@ -2,7 +2,6 @@ const axios = require('axios');
const { JWT } = require('google-auth-library');
const { BigQuery } = require('@google-cloud/bigquery');


const projectId = process.env.GOOGLE_CLOUD_PROJECT;
if (!projectId) {
console.error('GOOGLE_CLOUD_PROJECT environment variable must be set.');
@@ -15,6 +14,12 @@ if (!credentials_base64) {
process.exit(1);
}

+const private_endpoint_name = process.env.PRIVATE_ENDPOINT_NAME;
+if (!private_endpoint_name) {
+  console.error('PRIVATE_ENDPOINT_NAME environment variable must be set.');
+  process.exit(1);
+}

const credentials_json = Buffer.from(credentials_base64, 'base64').toString('utf-8');
const credentials = JSON.parse(credentials_json);

@@ -55,7 +60,7 @@ class CustomBigQueryClient extends BigQuery {
...reqOpts.headers,
'Authorization': `Bearer ${token}`,
'Content-Type': 'application/json',
-'Host': 'bigquery.googleapis.com',
+'Host': `bigquery-${private_endpoint_name}.p.googleapis.com`,
},
data: body,
};
@@ -68,7 +73,7 @@ class CustomBigQueryClient extends BigQuery {
}
}

-const bigQueryClient = new CustomBigQueryClient('effortless-cat-433609-h1');
+const bigQueryClient = new CustomBigQueryClient(projectId);

async function createDataset(datasetId) {
console.log(`Creating Dataset ${datasetId}`);
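
The Host header rewrite above is what steers requests into the private endpoint. A minimal sketch of the same idea with curl, run from inside the GCP VPC (the reserved address 10.200.0.5 comes from this example's metrics_corp/run.sh, the URL is the standard BigQuery REST dataset-list path, and gcloud credentials are assumed to be available):

# Resolve the PSC hostname to the reserved internal address and list datasets.
curl --resolve "bigquery-${PRIVATE_ENDPOINT_NAME}.p.googleapis.com:443:10.200.0.5" \
  -H "Authorization: Bearer $(gcloud auth print-access-token)" \
  "https://bigquery-${PRIVATE_ENDPOINT_NAME}.p.googleapis.com/bigquery/v2/projects/${GOOGLE_CLOUD_PROJECT}/datasets"
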
@@ -58,13 +58,13 @@ run() {
until scp -o StrictHostKeyChecking=no -i ./key.pem ./app.js "ec2-user@$ip:app.js"; do sleep 10; done
ssh -o StrictHostKeyChecking=no -i ./key.pem "ec2-user@$ip" \
'bash -s' << EOS
+# Wait for private endpoint to be up.
+while ! curl -H "Host: bigquery-${PRIVATE_ENDPOINT_NAME}.p.googleapis.com" http://127.0.0.1:8080/discovery/v1/apis/bigquery/v2/rest --connect-timeout 2 --max-time 5 --silent > /dev/null; do sleep 5 && echo "private endpoint not up yet... retrying"; done
export GOOGLE_CLOUD_PROJECT="$GOOGLE_CLOUD_PROJECT_ID"
export GOOGLE_APPLICATION_CREDENTIALS_BASE64="$GOOGLE_APPLICATION_CREDENTIALS_BASE64"
sudo yum update -y && sudo yum install nodejs -y
-npm install @google-cloud/bigquery
-npm install google-auth-library
-npm install axios
-node app.js
+npm install @google-cloud/bigquery google-auth-library axios
+PRIVATE_ENDPOINT_NAME="$PRIVATE_ENDPOINT_NAME" node app.js
EOS
}
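
GOOGLE_APPLICATION_CREDENTIALS_BASE64 is expected to hold a base64-encoded service account key. One way to produce it, assuming a key file named service-account.json (a hypothetical path):

# GNU coreutils syntax; on macOS use `base64 -i service-account.json`.
export GOOGLE_APPLICATION_CREDENTIALS_BASE64="$(base64 -w0 service-account.json)"
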

66 changes: 57 additions & 9 deletions examples/command/portals/databases/bigquery/metrics_corp/run.sh
@@ -1,29 +1,77 @@
#!/usr/bin/env bash


run() {
enrollment_ticket="$1"
+private_endpoint_address="10.200.0.5"

+# ----------------------------------------------------------------------------------------------------------------
+# CREATE PRIVATE GOOGLE API ENDPOINT (PRIVATE SERVICE CONNECT API)
+
+# Create a new VPC.
+gcloud compute networks create "${name}-vpc" --subnet-mode=custom --project="$GOOGLE_CLOUD_PROJECT_ID"
+
+# Create a subnet in the VPC.
+gcloud compute networks subnets create "${name}-subnet" --network="${name}-vpc" --project="$GOOGLE_CLOUD_PROJECT_ID" \
+  --range=10.0.0.0/24 --region=us-central1
+
+# Enable Private Google Access for the subnet.
+gcloud compute networks subnets update "${name}-subnet" --project="$GOOGLE_CLOUD_PROJECT_ID" \
+  --region=us-central1 --enable-private-ip-google-access
+
+# Reserve an internal IP address for the private service connect (psc).
+gcloud compute addresses create "${name}-psc-address" --global --project="$GOOGLE_CLOUD_PROJECT_ID" \
+  --purpose=PRIVATE_SERVICE_CONNECT --addresses="$private_endpoint_address" --network="${name}-vpc"
+
+# Create a forwarding rule to connect to BigQuery using the reserved IP address.
+gcloud compute forwarding-rules create "$PRIVATE_ENDPOINT_NAME" --global --project="$GOOGLE_CLOUD_PROJECT_ID" \
+  --network="${name}-vpc" --address="${name}-psc-address" --target-google-apis-bundle=all-apis
+
+# Allow Egress traffic to the internet.
+gcloud compute firewall-rules create allow-all-egress \
+  --network="${name}-vpc" --allow=all --direction=EGRESS --priority=1000 --destination-ranges=0.0.0.0/0 --target-tags=allow-egress
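
Once created, the PSC endpoint and its reserved address can be confirmed with standard gcloud describe calls, for example:

gcloud compute forwarding-rules describe "$PRIVATE_ENDPOINT_NAME" --global --project="$GOOGLE_CLOUD_PROJECT_ID"
gcloud compute addresses describe "${name}-psc-address" --global --project="$GOOGLE_CLOUD_PROJECT_ID"
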


# ----------------------------------------------------------------------------------------------------------------
-# CREATE INSTANCE AND START RELAY
+# CREATE INSTANCE USING THE PRIVATE GOOGLE API ENDPOINT
sed "s/\$ENROLLMENT_TICKET/${enrollment_ticket}/g" run_ockam.sh > user_data1.sh
sed "s/\$OCKAM_VERSION/${OCKAM_VERSION}/g" user_data1.sh > user_data.sh
sed "s/\$OCKAM_VERSION/${OCKAM_VERSION}/g" user_data1.sh > user_data2.sh
sed "s/\$PRIVATE_ENDPOINT_NAME/${PRIVATE_ENDPOINT_NAME}/g" user_data2.sh > user_data.sh

-gcloud compute instances create "${name}-key" \
+gcloud compute instances create "${name}-vm-instance" \
  --project="$GOOGLE_CLOUD_PROJECT_ID" \
-  --zone="us-central1-c" \
-  --create-disk=auto-delete=yes,boot=yes,device-name="${name}-key",image=projects/debian-cloud/global/images/debian-12-bookworm-v20240815,mode=rw,size=10,type=pd-balanced \
+  --zone="us-central1-a" \
+  --create-disk=auto-delete=yes,boot=yes,device-name="${name}-vm-instance",image=projects/debian-cloud/global/images/debian-12-bookworm-v20240815,mode=rw,size=10,type=pd-balanced \
  --machine-type=e2-medium \
-  --network-interface=network-tier=PREMIUM,stack-type=IPV4_ONLY,subnet=default \
-  --tags="${name}-key" \
+  --subnet="${name}-subnet" \
+  --tags=allow-egress \
  --metadata-from-file=startup-script=user_data.sh

rm -rf user_data*.sh
}
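
The startup script runs asynchronously after the instance boots. Assuming default gcloud SSH access to the VM, its progress can be followed via the standard GCE guest-agent journal unit:

gcloud compute ssh "${name}-vm-instance" --zone=us-central1-a --project="$GOOGLE_CLOUD_PROJECT_ID" \
  --command='sudo journalctl -u google-startup-scripts.service -f'
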

cleanup() {
# ----------------------------------------------------------------------------------------------------------------
-# DELETE INSTANCE
-gcloud compute instances delete "${name}-key" --zone="us-central1-c" --project="$GOOGLE_CLOUD_PROJECT_ID" --quiet || true
+# DELETE NETWORK

+# Delete forwarding rule
+gcloud compute forwarding-rules delete "$PRIVATE_ENDPOINT_NAME" --global --quiet
+
+# Delete reserved endpoint address
+gcloud compute addresses delete "${name}-psc-address" --global --quiet
+
+# Delete rule to allow egress
+gcloud compute firewall-rules delete allow-all-egress --quiet
+
+# ----------------------------------------------------------------------------------------------------------------
+# DELETE INSTANCE RESOURCES
+gcloud compute instances delete "${name}-vm-instance" --zone="us-central1-a" --project="$GOOGLE_CLOUD_PROJECT_ID" --quiet
+
+# Delete subnet
+gcloud compute networks subnets delete "${name}-subnet" --region=us-central1 --quiet
+# Delete VPC
+gcloud compute networks delete "${name}-vpc" --quiet

rm -rf user_data*.sh
}

@@ -30,23 +30,23 @@ ockam project enroll "$ENROLLMENT_TICKET"
# Create an ockam node.
#
-# Create an encrypted relay to this node in the project at address: bigquery.googleapis.com.
+# Create an encrypted relay to this node in the project at address: bigquery-$PRIVATE_ENDPOINT_NAME.p.googleapis.com,
+# where PRIVATE_ENDPOINT_NAME is our private endpoint name, which defaults to ockamendpoint.
# The relay makes this node reachable by other project members.
#
# Create an access control policy that only allows project members that possesses a credential with
# attribute bigquery-inlet="true" to connect to TCP Portal Outlets on this node.
#
-# Create a TCP Portal Outlet to BigQuery API at at - bigquery.googleapis.com:443.
+# Create a TCP Portal Outlet to the BigQuery API at bigquery-$PRIVATE_ENDPOINT_NAME.p.googleapis.com:443.
cat << EOF > outlet.yaml
tcp-outlet:
-  to: bigquery.googleapis.com:443
+  to: bigquery-$PRIVATE_ENDPOINT_NAME.p.googleapis.com:443
  tls: true
  allow: '(= subject.bigquery-inlet "true")'
  relay: bigquery
EOF
cat outlet.yaml
ockam node create outlet.yaml
rm outlet.yaml
EOS
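
The inlet side of the portal is not shown in this diff; a minimal sketch of a counterpart node configuration, assuming the same attribute-based policy pattern (the actual inlet file in this example may differ):

cat << EOF > inlet.yaml
tcp-inlet:
  from: 127.0.0.1:8080
  via: bigquery
  allow: '(= subject.bigquery-outlet "true")'
EOF
ockam node create inlet.yaml
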
12 changes: 6 additions & 6 deletions examples/command/portals/databases/bigquery/run.sh
@@ -3,8 +3,8 @@ set -e

# This script, `./run.sh ...` is invoked on a developer’s work machine.
#
-# This hands-on example uses Ockam to create an end-to-end encrypted portal to BigQuery. We host a reverse proxy in GCP cloud
-# and access through an Amazon VPC.
+# This hands-on example uses Ockam to create an end-to-end encrypted portal to BigQuery. We connect to a BigQuery private
+# endpoint in a GCP VPC and access it through an Amazon VPC.
#
# The example uses AWS CLI and gcloud CLI to create these VPCs.
#
@@ -49,7 +49,6 @@ run() {
export OCKAM_VERSION="v${OCKAM_VERSION}";
fi

-# Ensure that a project ID is set
if [[ -z "$GOOGLE_CLOUD_PROJECT_ID" ]]; then
echo "ERROR: Please set the GOOGLE_CLOUD_PROJECT_ID environment variable"
exit 1
@@ -60,8 +59,6 @@ run() {
exit 1
fi

echo "$GOOGLE_APPLICATION_CREDENTIALS"

# Invoke `metrics_corp/run.sh` in the directory that has metrics_corp's configuration. Pass the above enrollment ticket
# as the first argument to run.sh script. Read metrics_corp/run.sh to understand the parts that are provisioned in
# metrics_corp's virtual private cloud.
@@ -88,10 +85,13 @@ if ! type ockam &>/dev/null; then
fi

# Check that tools we need are installed.
-for c in aws curl; do
+for c in aws curl gcloud; do
if ! type "$c" &>/dev/null; then echo "ERROR: Please install: $c" && exit 1; fi
done

+# The private endpoint name must contain only alphanumeric characters.
+export PRIVATE_ENDPOINT_NAME="ockamendpoint"

# Check if the first argument is "cleanup"
# If it is, call the cleanup function. If not, call the run function.
if [ "$1" = "cleanup" ]; then cleanup; else run; fi
