Clean up docker environment

Mark Veidemanis 2022-10-19 16:45:18 +01:00
parent ed5fccfc66
commit 8c06ec44cb
Signed by: m
GPG Key ID: 5ACFCEED46C0904F
6 changed files with 70 additions and 89 deletions

Makefile (new file, +20)

@@ -0,0 +1,20 @@
run:
	docker-compose -f docker/docker-compose.prod.yml --env-file=stack.env up -d
build:
	docker-compose -f docker/docker-compose.prod.yml --env-file=stack.env build
stop:
	docker-compose -f docker/docker-compose.prod.yml --env-file=stack.env down
log:
	docker-compose -f docker/docker-compose.prod.yml --env-file=stack.env logs -f
run-infra:
	docker-compose -f docker/docker-compose.infra.yml --env-file=stack.env up -d
stop-infra:
	docker-compose -f docker/docker-compose.infra.yml --env-file=stack.env down
log-infra:
	docker-compose -f docker/docker-compose.infra.yml --env-file=stack.env logs -f

db.py (+2)

@@ -44,6 +44,8 @@ KEYNAME = "queue"
async def store_kafka_batch(data):
    print("FAKE STORE KAFKA BATCH")
    return
    # log.debug(f"Storing Kafka batch of {len(data)} messages")
    producer = AIOKafkaProducer(bootstrap_servers="kafka:9092")
    await producer.start()
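
For context, a minimal sketch of what the now-bypassed batch store presumably does with aiokafka, assuming each item in `data` is a JSON-serialisable dict; the topic name "main" and the serialisation step are placeholders, since neither appears in this hunk:

import json

from aiokafka import AIOKafkaProducer


async def store_kafka_batch(data, topic="main"):  # "main" is a placeholder topic
    # Broker address as in the existing code
    producer = AIOKafkaProducer(bootstrap_servers="kafka:9092")
    await producer.start()
    try:
        for msg in data:
            # send_and_wait blocks until the broker acknowledges the record
            await producer.send_and_wait(topic, json.dumps(msg).encode("utf-8"))
    finally:
        await producer.stop()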

View File

@@ -21,7 +21,7 @@ services:
     volumes_from:
       - tmp
     depends_on:
-      broker:
+      druid:
         condition: service_started
       kafka:
         condition: service_healthy

View File

@@ -36,6 +36,9 @@ services:
       - "${THRESHOLD_API_PORT}:${THRESHOLD_API_PORT}"
     env_file:
       - ../stack.env
+    # for development
+    extra_hosts:
+      - "host.docker.internal:host-gateway"
     volumes_from:
       - tmp
     depends_on:
@@ -46,12 +49,14 @@ services:
  tmp:
    image: busybox
    container_name: tmp_monolith
    command: chmod -R 777 /var/run/redis
    volumes:
      - /var/run/redis
  redis:
    image: redis
    container_name: redis_monolith
    command: redis-server /etc/redis.conf
    ulimits:
      nproc: 65535
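
The tmp/busybox container exists only to share a world-writable /var/run/redis volume between the Redis container and the application, which suggests Redis is reached over a unix socket rather than TCP. A rough sketch of such a connection, assuming redis.conf (not part of this diff) creates a socket at the hypothetical path /var/run/redis/redis.sock:

import redis.asyncio as redis

# Hypothetical socket path; the real name is set in redis.conf, which is not in this diff
REDIS_SOCKET_PATH = "/var/run/redis/redis.sock"


async def get_redis() -> redis.Redis:
    # Connect over the unix socket shared via the tmp volume container
    conn = redis.Redis(unix_socket_path=REDIS_SOCKET_PATH, db=0)
    await conn.ping()
    return conn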

View File

@@ -1,87 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Java tuning
#DRUID_XMX=1g
#DRUID_XMS=1g
#DRUID_MAXNEWSIZE=250m
#DRUID_NEWSIZE=250m
#DRUID_MAXDIRECTMEMORYSIZE=1g
#druid_emitter_logging_logLevel=debug
#druid_extensions_loadList=["druid-histogram", "druid-datasketches", "druid-lookups-cached-global", "postgresql-metadata-storage", "druid-kafka-indexing-service"]
#druid_zk_service_host=zookeeper
#druid_metadata_storage_host=
#druid_metadata_storage_type=postgresql
#druid_metadata_storage_connector_connectURI=jdbc:postgresql://postgres:5432/druid
#druid_metadata_storage_connector_user=druid
#druid_metadata_storage_connector_password=FoolishPassword
#druid_coordinator_balancer_strategy=cachingCost
#druid_indexer_runner_javaOptsArray=["-server", "-Xmx1g", "-Xms1g", "-XX:MaxDirectMemorySize=3g", "-Duser.timezone=UTC", "-Dfile.encoding=UTF-8", "-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager"]
#druid_indexer_fork_property_druid_processing_buffer_sizeBytes=128MiB
#druid_processing_buffer_sizeBytes=268435456 # 256MiB
#druid_storage_type=local
#druid_storage_storageDirectory=/opt/shared/segments
#druid_indexer_logs_type=file
#druid_indexer_logs_directory=/opt/shared/indexing-logs
#druid_processing_numThreads=1
#druid_processing_numMergeBuffers=1
#DRUID_LOG4J=<?xml version="1.0" encoding="UTF-8" ?><Configuration status="WARN"><Appenders><Console name="Console" target="SYSTEM_OUT"><PatternLayout pattern="%d{ISO8601} %p [%t] %c - %m%n"/></Console></Appenders><Loggers><Root level="info"><AppenderRef ref="Console"/></Root><Logger name="org.apache.druid.jetty.RequestLog" additivity="false" level="DEBUG"><AppenderRef ref="Console"/></Logger></Loggers></Configuration>
# Java tuning
#DRUID_XMX=1g
#DRUID_XMS=1g
#DRUID_MAXNEWSIZE=250m
#DRUID_NEWSIZE=250m
#DRUID_MAXDIRECTMEMORYSIZE=6172m
DRUID_SINGLE_NODE_CONF=nano-quickstart
druid_emitter_logging_logLevel=debug
druid_extensions_loadList=["druid-histogram", "druid-datasketches", "druid-lookups-cached-global", "postgresql-metadata-storage", "druid-kafka-indexing-service"]
druid_zk_service_host=zookeeper
druid_metadata_storage_host=
druid_metadata_storage_type=postgresql
druid_metadata_storage_connector_connectURI=jdbc:postgresql://postgres:5432/druid
druid_metadata_storage_connector_user=druid
druid_metadata_storage_connector_password=FoolishPassword
druid_coordinator_balancer_strategy=cachingCost
druid_indexer_runner_javaOptsArray=["-server", "-Xmx1g", "-Xms1g", "-XX:MaxDirectMemorySize=3g", "-Duser.timezone=UTC", "-Dfile.encoding=UTF-8", "-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager"]
druid_indexer_fork_property_druid_processing_buffer_sizeBytes=256MiB
druid_storage_type=local
druid_storage_storageDirectory=/opt/shared/segments
druid_indexer_logs_type=file
druid_indexer_logs_directory=/opt/shared/indexing-logs
druid_processing_numThreads=2
druid_processing_numMergeBuffers=2
DRUID_LOG4J=<?xml version="1.0" encoding="UTF-8" ?><Configuration status="WARN"><Appenders><Console name="Console" target="SYSTEM_OUT"><PatternLayout pattern="%d{ISO8601} %p [%t] %c - %m%n"/></Console></Appenders><Loggers><Root level="info"><AppenderRef ref="Console"/></Root><Logger name="org.apache.druid.jetty.RequestLog" additivity="false" level="DEBUG"><AppenderRef ref="Console"/></Logger></Loggers></Configuration>

stack.env (new file, +41)

@@ -0,0 +1,41 @@
PORTAINER_GIT_DIR=.
MODULES_ENABLED="dis"
DISCORD_TOKEN="MTAwMzY5NjYyNTkxMjQwMTk2MQ.GRFrdq.tY_XyfBRyV0KlVAhGnPz1DH8URqE5jgV1_JJi0"
THRESHOLD_LISTENER_HOST=0.0.0.0
THRESHOLD_LISTENER_PORT=13867
THRESHOLD_LISTENER_SSL=1
THRESHOLD_RELAY_ENABLED=0
THRESHOLD_RELAY_HOST=0.0.0.0
THRESHOLD_RELAY_PORT=13868
THRESHOLD_RELAY_SSL=1
THRESHOLD_API_ENABLED=1
THRESHOLD_API_HOST=0.0.0.0
THRESHOLD_API_PORT=13869
PORTAINER_GIT_DIR=.
THRESHOLD_CONFIG_DIR=./legacy/conf/live/
THRESHOLD_CERT_DIR=./legacy/conf/cert/
# How many messages to ingest at once from Redis
MONOLITH_INGEST_CHUNK_SIZE=8000
# Time to wait between Redis polls
MONOLITH_INGEST_ITER_DELAY=0.5
# Number of 4chan threads to request at once
MONOLITH_CH4_THREADS_CONCURRENT=1000
# Time to wait after each batch of MONOLITH_CH4_THREADS_CONCURRENT threads
MONOLITH_CH4_THREADS_DELAY=0.1
# Time to wait after finishing a crawl before starting again
MONOLITH_CH4_CRAWL_DELAY=30
# Semaphore value capping concurrent 4chan thread fetches
MONOLITH_CH4_THREADS_SEMAPHORE=1000
# Threads to use for data processing
# Leave commented out to use all available threads
MONOLITH_PROCESS_THREADS=4
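
To illustrate how the ingest tuning knobs above might be consumed, here is a rough sketch of a polling loop, assuming the Redis list is the KEYNAME = "queue" seen in db.py and that redis_conn and process_batch are hypothetical stand-ins for code not included in this commit:

import asyncio
import os

# Values come from stack.env; the fallbacks below just mirror the defaults set there
CHUNK_SIZE = int(os.getenv("MONOLITH_INGEST_CHUNK_SIZE", "8000"))
ITER_DELAY = float(os.getenv("MONOLITH_INGEST_ITER_DELAY", "0.5"))


async def ingest_loop(redis_conn, process_batch):
    """Pop up to CHUNK_SIZE items from the queue, process them,
    then wait ITER_DELAY seconds before polling Redis again."""
    while True:
        # LPOP with a count argument needs Redis >= 6.2
        chunk = await redis_conn.lpop("queue", CHUNK_SIZE)
        if chunk:
            await process_batch(chunk)
        await asyncio.sleep(ITER_DELAY)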