Dataset Viewer
Column: text (string, lengths 0 to 243)
docker network create --subnet=172.20.0.0/16 datamakingnet # create custom network
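# (illustrative check, not part of the original log) confirm the custom subnet was applied
docker network inspect datamakingnet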
1. Create ZooKeeper Container
docker pull zookeeper:3.4
docker run -d --hostname zookeepernode --net datamakingnet --ip 172.20.1.3 --name datamaking_zookeeper --publish 2181:2181 zookeeper:3.4
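# (sketch, not from the original log) quick health check; assumes the official zookeeper:3.4 image keeps
# its bin directory on PATH so zkServer.sh is reachable inside the container
docker exec datamaking_zookeeper zkServer.sh status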
2. Create Kafka Container
docker pull ches/kafka
docker run -d --hostname kafkanode --net datamakingnet --ip 172.20.1.4 --name datamaking_kafka --publish 9092:9092 --publish 7203:7203 --env KAFKA_ADVERTISED_HOST_NAME=192.168.99.100 --env ZOOKEEPER_IP=192.168.99.100 ches/kafka
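# (sketch, not from the original log) verify the broker by creating and listing a test topic with the CLI
# scripts bundled in the ches/kafka image; the script names and the ZooKeeper address 172.20.1.3:2181 are
# assumptions based on that image's documentation and the fixed IPs used above
docker run --rm --net datamakingnet ches/kafka kafka-topics.sh --create --topic test --replication-factor 1 --partitions 1 --zookeeper 172.20.1.3:2181
docker run --rm --net datamakingnet ches/kafka kafka-topics.sh --list --zookeeper 172.20.1.3:2181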
docker images |
docker ps |
docker ps -a |
sudo -i |
ssh-keygen |
clear |
sudo apt update |
sudo apt install awscli -y |
clear
clear |
aws configure |
curl -LO "https://dl.k8s.io/release/$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl" |
sudo install -o root -g root -m 0755 kubectl /usr/local/bin/kubectl |
curl -Lo kops https://github.com/kubernetes/kops/releases/download/$(curl -s https://api.github.com/repos/kubernetes/kops/releases/latest | grep tag_name | cut -d '"' -f 4)/kops-linux-amd64 |
chmod +x kops |
sudo mv kops /usr/local/bin/kops |
kops |
kubectl |
clear |
kops create cluster --name=kubevpro.groophy.in --state=s3://vproile-kops-state --zones=us-west-2a,us-west-2b --node-count=2 --node-size=t3.small \ |
clear |
kops create cluster --name=kubevpro.groophy.in --state=s3://vproile-kops-state --zones=us-west-2a,us-west-2b --node-count=2 --node-size=t3.small --master-size=t3.medium --dns-zone=kubevpro.groophy.in --node-volume-size=8 --master-volume-size=8 |
kops update cluster --name kubevpro.groophy.in --state=s3://vproile-kops-state --yes --admin |
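# (sketch, not from the original log) wait for the new cluster to become ready before querying nodes;
# the --wait flag is assumed to be available in the installed (latest) kops release
kops validate cluster --name=kubevpro.groophy.in --state=s3://vproile-kops-state --wait 10m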
clear |
kubectl get nodes |
clear |
kubectl get nodes |
kubectl get nodes -o wide |
kubectl describe node ip-172-20-86-99.us-west-2.compute.internal |
clear |
kubectl get nodes |
kubectl get nodes ip-172-20-41-44.us-west-2.compute.internal -o yaml |
clear |
kubectl get nodes ip-172-20-41-44.us-west-2.compute.internal -o json |
clear |
history |
clear |
vim pod1.yaml |
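# (hypothetical reconstruction) pod1.yaml itself is not captured in the log; a minimal manifest consistent
# with the later "kubectl get pod nginx" / "kubectl delete pod nginx" commands would look roughly like this
cat > pod1.yaml <<'EOF'
apiVersion: v1
kind: Pod
metadata:
  name: nginx
spec:
  containers:
  - name: nginx
    image: nginx
    ports:
    - containerPort: 80
EOF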
kubectl apply -f pod1.yaml |
kubectl get pod |
kubectl get pod -o wide |
kubectl get pod nginx -o yaml |
clear |
kubectl get pod nginx -o json |
clear |
kubectl describe pod nginx |
clear |
kubectl delete pod nginx |
ls |
kubectl run nginx1 --image=nginx |
kubectl get pod |
kubectl edit pod nginx |
kubectl edit pod nginx1 |
kubectl get pod |
# |
# This constraints file was automatically generated on 2024-09-16T16:29:09.578311 |
# via "eager-upgrade" mechanism of PIP. For the "v2-10-test" branch of Airflow. |
# This variant of the constraints uses the HEAD of the branch for 'apache-airflow' but installs
# the providers from PIP-released packages available at the moment of constraint generation.
# |
# Those constraints are actually those that regular users use to install a released version of Airflow.
# We also use those constraints after "apache-airflow" is released and the constraints are tagged with the
# "constraints-X.Y.Z" tag to build the production image for that version.
# |
# This constraints file is meant to be used only in the "apache-airflow" installation command and not |
# in all subsequent pip commands. By using a constraints.txt file, we ensure that solely the Airflow |
# installation step is reproducible. Subsequent pip commands may install packages that would have |
# been incompatible with the constraints used in Airflow reproducible installation step. Finally, pip |
# commands that might change the installed version of apache-airflow should include "apache-airflow==X.Y.Z" |
# in the list of install targets to prevent an accidental Airflow upgrade or downgrade.
# |
# A typical installation of Airflow for Python 3.8 (with a random selection of extras and custom
# dependencies added) usually consists of two steps:
# |
# 1. Reproducible installation of airflow with selected providers (note constraints are used): |
# |
# pip install "apache-airflow[celery,cncf.kubernetes,google,amazon,snowflake]==X.Y.Z" \ |
# --constraint \ |
# "https://raw.githubusercontent.com/apache/airflow/constraints-X.Y.Z/constraints-3.8.txt" |
# |
# 2. Installing own dependencies that are potentially not matching the constraints (note constraints are not
#    used here, and "apache-airflow==X.Y.Z" should be pinned to avoid an accidental upgrade or downgrade):
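#
#    (illustrative, not part of the original header) step 2 then installs the extra dependencies without a
#    constraints file, keeping Airflow pinned; "some-package" below is a placeholder:
#
#    pip install "apache-airflow==X.Y.Z" "some-package"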