Fix Redis, begin implementing MEXC

This commit is contained in:
Mark Veidemanis 2024-11-16 17:31:43 +00:00
parent 95a4a6930c
commit 761b084704
Signed by: m
GPG Key ID: 5ACFCEED46C0904F
13 changed files with 524 additions and 109 deletions

View File

@ -1,26 +1,26 @@
run: run:
docker-compose -f docker-compose.dev.yml --env-file=stack.env up -d docker-compose --env-file=stack.env up -d
build: build:
docker-compose -f docker-compose.dev.yml --env-file=stack.env build docker-compose --env-file=stack.env build
stop: stop:
docker-compose -f docker-compose.dev.yml --env-file=stack.env down docker-compose --env-file=stack.env down
log: log:
docker-compose -f docker-compose.dev.yml --env-file=stack.env logs -f docker-compose --env-file=stack.env logs -f
test: test:
docker-compose -f docker-compose.dev.yml --env-file=stack.env run -e LIVE=$(LIVE) --rm app_dev sh -c ". /venv/bin/activate && python manage.py test $(MODULES) -v 2" docker-compose --env-file=stack.env run -e LIVE=$(LIVE) --rm app_dev sh -c ". /venv/bin/activate && python manage.py test $(MODULES) -v 2"
migrate: migrate:
docker-compose -f docker-compose.dev.yml --env-file=stack.env run --rm app_dev sh -c ". /venv/bin/activate && python manage.py migrate" docker-compose --env-file=stack.env run --rm app_dev sh -c ". /venv/bin/activate && python manage.py migrate"
makemigrations: makemigrations:
docker-compose -f docker-compose.dev.yml --env-file=stack.env run --rm app_dev sh -c ". /venv/bin/activate && python manage.py makemigrations" docker-compose --env-file=stack.env run --rm app_dev sh -c ". /venv/bin/activate && python manage.py makemigrations"
auth: auth:
docker-compose -f docker-compose.dev.yml --env-file=stack.env run --rm app_dev sh -c ". /venv/bin/activate && python manage.py createsuperuser" docker-compose --env-file=stack.env run --rm app_dev sh -c ". /venv/bin/activate && python manage.py createsuperuser"
token: token:
docker-compose -f docker-compose.dev.yml --env-file=stack.env run --rm app_dev sh -c ". /venv/bin/activate && python manage.py addstatictoken m" docker-compose --env-file=stack.env run --rm app_dev sh -c ". /venv/bin/activate && python manage.py addstatictoken m"

View File

@ -1,26 +1,26 @@
run: run:
docker-compose --env-file=stack.env up -d docker-compose -f docker-compose.prod.yml --env-file=stack.env up -d
build: build:
docker-compose --env-file=stack.env build docker-compose -f docker-compose.prod.yml --env-file=stack.env build
stop: stop:
docker-compose --env-file=stack.env down docker-compose -f docker-compose.prod.yml --env-file=stack.env down
log: log:
docker-compose --env-file=stack.env logs -f docker-compose -f docker-compose.prod.yml --env-file=stack.env logs -f
test: test:
docker-compose --env-file=stack.env run -e LIVE=$(LIVE) --rm app sh -c ". /venv/bin/activate && python manage.py test $(MODULES) -v 2" docker-compose -f docker-compose.prod.yml --env-file=stack.env run -e LIVE=$(LIVE) --rm app sh -c ". /venv/bin/activate && python manage.py test $(MODULES) -v 2"
migrate: migrate:
docker-compose --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py migrate" docker-compose -f docker-compose.prod.yml --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py migrate"
makemigrations: makemigrations:
docker-compose --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py makemigrations" docker-compose -f docker-compose.prod.yml --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py makemigrations"
auth: auth:
docker-compose --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py createsuperuser" docker-compose -f docker-compose.prod.yml --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py createsuperuser"
token: token:
docker-compose --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py addstatictoken m" docker-compose -f docker-compose.prod.yml --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py addstatictoken m"

View File

@ -48,13 +48,18 @@ LAGO_URL = getenv("LAGO_URL", "")
DEBUG = getenv("DEBUG", "false").lower() in trues DEBUG = getenv("DEBUG", "false").lower() in trues
PROFILER = getenv("PROFILER", "false").lower() in trues PROFILER = getenv("PROFILER", "false").lower() in trues
# Redis connection settings consumed by the django-redis cache backend
# configured further down in settings.
REDIS_HOST = getenv("REDIS_HOST", "redis_fisk_dev")
# NOTE(review): insecure fallback password — this default also appears in
# docker/redis.conf (requirepass changeme); confirm REDIS_PASSWORD is
# overridden via stack.env in every deployed environment.
REDIS_PASSWORD = getenv("REDIS_PASSWORD", "changeme")
REDIS_DB = int(getenv("REDIS_DB", "10"))
REDIS_PORT = int(getenv("REDIS_PORT", "6379"))
if DEBUG: if DEBUG:
import socket # only if you haven't already imported this import socket # only if you haven't already imported this
hostname, _, ips = socket.gethostbyname_ex(socket.gethostname()) hostname, _, ips = socket.gethostbyname_ex(socket.gethostname())
INTERNAL_IPS = [ip[: ip.rfind(".")] + ".1" for ip in ips] + [ INTERNAL_IPS = [ip[: ip.rfind(".")] + ".1" for ip in ips] + [
"127.0.0.1", "127.0.0.1",
"10.0.2.2", # "10.0.2.2",
] ]
SETTINGS_EXPORT = ["BILLING_ENABLED", "URL", "HOOK_PATH", "ASSET_PATH"] SETTINGS_EXPORT = ["BILLING_ENABLED", "URL", "HOOK_PATH", "ASSET_PATH"]

View File

@ -56,23 +56,6 @@ INSTALLED_APPS = [
"cachalot", "cachalot",
] ]
# Performance optimisations
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": "unix:///var/run/socks/redis.sock",
"OPTIONS": {
"db": "10",
# "parser_class": "django_redis.cache.RedisCache",
"pool_class": "redis.BlockingConnectionPool",
},
}
}
# CACHE_MIDDLEWARE_ALIAS = 'default'
# CACHE_MIDDLEWARE_SECONDS = '600'
# CACHE_MIDDLEWARE_KEY_PREFIX = ''
CRISPY_TEMPLATE_PACK = "bulma" CRISPY_TEMPLATE_PACK = "bulma"
CRISPY_ALLOWED_TEMPLATE_PACKS = ("bulma",) CRISPY_ALLOWED_TEMPLATE_PACKS = ("bulma",)
DJANGO_TABLES2_TEMPLATE = "django-tables2/bulma.html" DJANGO_TABLES2_TEMPLATE = "django-tables2/bulma.html"
@ -184,7 +167,7 @@ REST_FRAMEWORK = {
INTERNAL_IPS = [ INTERNAL_IPS = [
"127.0.0.1", "127.0.0.1",
"10.1.10.11", # "10.1.10.11",
] ]
DEBUG_TOOLBAR_PANELS = [ DEBUG_TOOLBAR_PANELS = [
@ -208,6 +191,24 @@ DEBUG_TOOLBAR_PANELS = [
from app.local_settings import * # noqa from app.local_settings import * # noqa
# Performance optimisations
# Cache backend: django-redis over TCP (the previous unix-socket LOCATION is
# kept below for reference).
CACHES = {
    "default": {
        "BACKEND": "django_redis.cache.RedisCache",
        # "LOCATION": "unix:///var/run/socks/redis.sock",
        # The database index is selected via the URL path — django-redis does
        # not honour a lowercase "db" key inside OPTIONS.
        "LOCATION": f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}",
        "OPTIONS": {
            # django-redis OPTIONS keys are uppercase; "pool_class" is not a
            # recognised option — CONNECTION_POOL_CLASS is.
            "PASSWORD": REDIS_PASSWORD,
            "CONNECTION_POOL_CLASS": "redis.BlockingConnectionPool",
        },
    }
}
# CACHE_MIDDLEWARE_ALIAS = 'default'
# CACHE_MIDDLEWARE_SECONDS = '600'
# CACHE_MIDDLEWARE_KEY_PREFIX = ''
if PROFILER: # noqa - trust me its there if PROFILER: # noqa - trust me its there
import pyroscope import pyroscope

View File

@ -1,4 +1,5 @@
import os import os
import stripe import stripe
from django.conf import settings from django.conf import settings

69
core/exchanges/mexc.py Normal file
View File

@ -0,0 +1,69 @@
from core.exchanges import BaseExchange, common
from core.util import logs
from pymexc import spot, futures
log = logs.get_logger("mexc")
class MEXCExchange(BaseExchange):
    """MEXC exchange adapter built on the pymexc spot HTTP client.

    Only ``connect`` and ``get_account`` are implemented so far; every other
    BaseExchange hook is a stub awaiting implementation.
    """

    def call_method(self, request):
        # TODO: dispatch arbitrary API calls once the client surface is known.
        ...

    def connect(self):
        """Create the authenticated spot HTTP client from account credentials.

        BUG FIX: ``api_secret`` was previously set to ``self.account.api_key``
        (copy-paste error), which would make every signed request fail
        authentication.
        """
        self.client = spot.HTTP(
            api_key=self.account.api_key,
            api_secret=self.account.api_secret,
        )

    def get_account(self):
        """Fetch raw account information from MEXC (currently debug-printed)."""
        r = self.client.account_information()
        print("ACC INFO", r)

    def get_instruments(self):
        ...

    def get_currencies(self, currencies):
        ...

    def get_supported_assets(self, response=None):
        ...

    def get_balance(self, return_usd=False):
        ...

    def get_market_value(self, symbol):
        # Explicitly unsupported for now, unlike the silent stubs above.
        raise NotImplementedError

    def post_trade(self, trade):
        ...

    def get_trade_precision(self, symbol):
        ...

    def close_trade(self, trade_id, units=None, symbol=None):
        ...

    def get_trade(self, trade_id):
        ...

    def update_trade(self, trade_id, take_profit_price, stop_loss_price):
        ...

    def cancel_trade(self, trade_id):
        ...

    def get_position_info(self, symbol):
        ...

    def get_all_positions(self):
        ...

    def get_all_open_trades(self):
        ...

    def close_position(self, side, symbol):
        ...

    def close_all_positions(self):
        ...

View File

@ -23,15 +23,16 @@ def initialise_elasticsearch():
def store_msg(index, msg): def store_msg(index, msg):
global client return
if not client: # global client
client = initialise_elasticsearch() # if not client:
if "ts" not in msg: # client = initialise_elasticsearch()
msg["ts"] = datetime.utcnow().isoformat() # if "ts" not in msg:
try: # msg["ts"] = datetime.utcnow().isoformat()
result = client.index(index=index, body=msg) # try:
except ConnectionError as e: # result = client.index(index=index, body=msg)
log.error(f"Error indexing '{msg}': {e}") # except ConnectionError as e:
return # log.error(f"Error indexing '{msg}': {e}")
if not result["result"] == "created": # return
log.error(f"Indexing of '{msg}' failed: {result}") # if not result["result"] == "created":
# log.error(f"Indexing of '{msg}' failed: {result}")

View File

@ -1,7 +1,8 @@
from decimal import Decimal as D from decimal import Decimal as D
from typing import Optional
from pydantic import BaseModel from pydantic import BaseModel
from typing import Optional
class PositionLong(BaseModel): class PositionLong(BaseModel):
units: str units: str
@ -373,7 +374,9 @@ class Instrument(BaseModel):
guaranteedStopLossOrderMode: str guaranteedStopLossOrderMode: str
tags: list[InstrumentTag] tags: list[InstrumentTag]
financing: InstrumentFinancing financing: InstrumentFinancing
guaranteedStopLossOrderLevelRestriction: Optional[InstrumentGuaranteedRestriction] = None guaranteedStopLossOrderLevelRestriction: Optional[
InstrumentGuaranteedRestriction
] = None
class AccountInstruments(BaseModel): class AccountInstruments(BaseModel):

View File

@ -8,6 +8,7 @@ from django.db import models
from core.exchanges.alpaca import AlpacaExchange from core.exchanges.alpaca import AlpacaExchange
from core.exchanges.fake import FakeExchange from core.exchanges.fake import FakeExchange
from core.exchanges.mexc import MEXCExchange
from core.exchanges.oanda import OANDAExchange from core.exchanges.oanda import OANDAExchange
# from core.lib.customers import get_or_create, update_customer_fields # from core.lib.customers import get_or_create, update_customer_fields
@ -15,7 +16,12 @@ from core.lib import billing
from core.util import logs from core.util import logs
log = logs.get_logger(__name__) log = logs.get_logger(__name__)
EXCHANGE_MAP = {"alpaca": AlpacaExchange, "oanda": OANDAExchange, "fake": FakeExchange} EXCHANGE_MAP = {
"alpaca": AlpacaExchange,
"oanda": OANDAExchange,
"mexc": MEXCExchange,
"fake": FakeExchange,
}
TYPE_CHOICES = ( TYPE_CHOICES = (
("market", "Market"), ("market", "Market"),
("limit", "Limit"), ("limit", "Limit"),
@ -141,7 +147,12 @@ class User(AbstractUser):
class Account(models.Model): class Account(models.Model):
EXCHANGE_CHOICES = (("alpaca", "Alpaca"), ("oanda", "OANDA"), ("fake", "Fake")) EXCHANGE_CHOICES = (
("alpaca", "Alpaca"),
("oanda", "OANDA"),
("mexc", "MEXC"),
("fake", "Fake"),
)
user = models.ForeignKey(User, on_delete=models.CASCADE) user = models.ForeignKey(User, on_delete=models.CASCADE)
name = models.CharField(max_length=255) name = models.CharField(max_length=255)
exchange = models.CharField(choices=EXCHANGE_CHOICES, max_length=255) exchange = models.CharField(choices=EXCHANGE_CHOICES, max_length=255)

316
docker-compose.prod.yml Normal file
View File

@ -0,0 +1,316 @@
version: "2.2"
services:
app:
image: xf/fisk:prod
container_name: fisk
build:
context: .
args:
OPERATION: ${OPERATION}
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${PORTAINER_GIT_DIR}/docker/uwsgi.ini:/conf/uwsgi.ini
- ${APP_DATABASE_FILE}:/conf/db.sqlite3
- fisk_static:${STATIC_ROOT}
#ports:
# - "8000:8000" # uwsgi socket
# Dirty hack for Podman
environment:
APP_PORT: "${APP_PORT}"
PORTAINER_GIT_DIR: "${PORTAINER_GIT_DIR}"
APP_LOCAL_SETTINGS: "${APP_LOCAL_SETTINGS}"
APP_DATABASE_FILE: "${APP_DATABASE_FILE}"
DOMAIN: "${DOMAIN}"
URL: "${URL}"
ALLOWED_HOSTS: "${ALLOWED_HOSTS}"
NOTIFY_TOPIC: "${NOTIFY_TOPIC}"
CSRF_TRUSTED_ORIGINS: "${CSRF_TRUSTED_ORIGINS}"
DEBUG: "${DEBUG}"
SECRET_KEY: "${SECRET_KEY}"
STATIC_ROOT: "${STATIC_ROOT}"
REGISTRATION_OPEN: "${REGISTRATION_OPEN}"
OPERATION: "${OPERATION}"
ELASTICSEARCH_USERNAME: "${ELASTICSEARCH_USERNAME}"
ELASTICSEARCH_PASSWORD: "${ELASTICSEARCH_PASSWORD}"
ELASTICSEARCH_HOST: "${ELASTICSEARCH_HOST}"
ELASTICSEARCH_TLS: "${ELASTICSEARCH_TLS}"
TEST_ACCOUNT_NAME: "${TEST_ACCOUNT_NAME}"
TEST_ACCOUNT_EXCHANGE: "${TEST_ACCOUNT_EXCHANGE}"
TEST_ACCOUNT_API_KEY: "${TEST_ACCOUNT_API_KEY}"
TEST_ACCOUNT_API_SECRET: "${TEST_ACCOUNT_API_SECRET}"
PROFILER: "${PROFILER}"
BILLING_ENABLED: "${BILLING_ENABLED}"
LAGO_API_KEY: "${LAGO_API_KEY}"
LAGO_ORG_ID: "${LAGO_ORG_ID}"
LAGO_URL: "${LAGO_URL}"
STRIPE_TEST: "${STRIPE_TEST}"
STRIPE_API_KEY_TEST: "${STRIPE_API_KEY_TEST}"
STRIPE_PUBLIC_API_KEY_TEST: "${STRIPE_PUBLIC_API_KEY_TEST}"
STRIPE_API_KEY_PROD: "${STRIPE_API_KEY_PROD}"
STRIPE_PUBLIC_API_KEY_PROD: "${STRIPE_PUBLIC_API_KEY_PROD}"
STRIPE_ENDPOINT_SECRET: "${STRIPE_ENDPOINT_SECRET}"
REDIS_HOST: "${REDIS_HOST}"
# env_file:
# - stack.env
# volumes_from:
# - tmp
depends_on:
# redis:
# condition: service_healthy
migration:
condition: service_started
collectstatic:
condition: service_started
networks:
- default
- xf
# - elastic
scheduling:
image: xf/fisk:prod
container_name: scheduling_fisk
build:
context: .
args:
OPERATION: ${OPERATION}
command: sh -c '. /venv/bin/activate && python manage.py scheduling'
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${PORTAINER_GIT_DIR}/docker/uwsgi.ini:/conf/uwsgi.ini
- ${APP_DATABASE_FILE}:/conf/db.sqlite3
- fisk_static:${STATIC_ROOT}
# Dirty hack for Podman
environment:
APP_PORT: "${APP_PORT}"
PORTAINER_GIT_DIR: "${PORTAINER_GIT_DIR}"
APP_LOCAL_SETTINGS: "${APP_LOCAL_SETTINGS}"
APP_DATABASE_FILE: "${APP_DATABASE_FILE}"
DOMAIN: "${DOMAIN}"
URL: "${URL}"
ALLOWED_HOSTS: "${ALLOWED_HOSTS}"
NOTIFY_TOPIC: "${NOTIFY_TOPIC}"
CSRF_TRUSTED_ORIGINS: "${CSRF_TRUSTED_ORIGINS}"
DEBUG: "${DEBUG}"
SECRET_KEY: "${SECRET_KEY}"
STATIC_ROOT: "${STATIC_ROOT}"
REGISTRATION_OPEN: "${REGISTRATION_OPEN}"
OPERATION: "${OPERATION}"
ELASTICSEARCH_USERNAME: "${ELASTICSEARCH_USERNAME}"
ELASTICSEARCH_PASSWORD: "${ELASTICSEARCH_PASSWORD}"
ELASTICSEARCH_HOST: "${ELASTICSEARCH_HOST}"
ELASTICSEARCH_TLS: "${ELASTICSEARCH_TLS}"
TEST_ACCOUNT_NAME: "${TEST_ACCOUNT_NAME}"
TEST_ACCOUNT_EXCHANGE: "${TEST_ACCOUNT_EXCHANGE}"
TEST_ACCOUNT_API_KEY: "${TEST_ACCOUNT_API_KEY}"
TEST_ACCOUNT_API_SECRET: "${TEST_ACCOUNT_API_SECRET}"
PROFILER: "${PROFILER}"
BILLING_ENABLED: "${BILLING_ENABLED}"
LAGO_API_KEY: "${LAGO_API_KEY}"
LAGO_ORG_ID: "${LAGO_ORG_ID}"
LAGO_URL: "${LAGO_URL}"
STRIPE_TEST: "${STRIPE_TEST}"
STRIPE_API_KEY_TEST: "${STRIPE_API_KEY_TEST}"
STRIPE_PUBLIC_API_KEY_TEST: "${STRIPE_PUBLIC_API_KEY_TEST}"
STRIPE_API_KEY_PROD: "${STRIPE_API_KEY_PROD}"
STRIPE_PUBLIC_API_KEY_PROD: "${STRIPE_PUBLIC_API_KEY_PROD}"
STRIPE_ENDPOINT_SECRET: "${STRIPE_ENDPOINT_SECRET}"
REDIS_HOST: "${REDIS_HOST}"
# env_file:
# - stack.env
# volumes_from:
# - tmp
depends_on:
redis:
condition: service_healthy
migration:
condition: service_started
collectstatic:
condition: service_started
networks:
- default
- xf
# - db
migration:
image: xf/fisk:prod
container_name: migration_fisk
build:
context: .
args:
OPERATION: ${OPERATION}
command: sh -c '. /venv/bin/activate && python manage.py migrate --noinput'
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${APP_DATABASE_FILE}:/conf/db.sqlite3
- fisk_static:${STATIC_ROOT}
# volumes_from:
# - tmp
# Dirty hack for Podman
environment:
APP_PORT: "${APP_PORT}"
PORTAINER_GIT_DIR: "${PORTAINER_GIT_DIR}"
APP_LOCAL_SETTINGS: "${APP_LOCAL_SETTINGS}"
APP_DATABASE_FILE: "${APP_DATABASE_FILE}"
DOMAIN: "${DOMAIN}"
URL: "${URL}"
ALLOWED_HOSTS: "${ALLOWED_HOSTS}"
NOTIFY_TOPIC: "${NOTIFY_TOPIC}"
CSRF_TRUSTED_ORIGINS: "${CSRF_TRUSTED_ORIGINS}"
DEBUG: "${DEBUG}"
SECRET_KEY: "${SECRET_KEY}"
STATIC_ROOT: "${STATIC_ROOT}"
REGISTRATION_OPEN: "${REGISTRATION_OPEN}"
OPERATION: "${OPERATION}"
ELASTICSEARCH_USERNAME: "${ELASTICSEARCH_USERNAME}"
ELASTICSEARCH_PASSWORD: "${ELASTICSEARCH_PASSWORD}"
ELASTICSEARCH_HOST: "${ELASTICSEARCH_HOST}"
ELASTICSEARCH_TLS: "${ELASTICSEARCH_TLS}"
TEST_ACCOUNT_NAME: "${TEST_ACCOUNT_NAME}"
TEST_ACCOUNT_EXCHANGE: "${TEST_ACCOUNT_EXCHANGE}"
TEST_ACCOUNT_API_KEY: "${TEST_ACCOUNT_API_KEY}"
TEST_ACCOUNT_API_SECRET: "${TEST_ACCOUNT_API_SECRET}"
PROFILER: "${PROFILER}"
BILLING_ENABLED: "${BILLING_ENABLED}"
LAGO_API_KEY: "${LAGO_API_KEY}"
LAGO_ORG_ID: "${LAGO_ORG_ID}"
LAGO_URL: "${LAGO_URL}"
STRIPE_TEST: "${STRIPE_TEST}"
STRIPE_API_KEY_TEST: "${STRIPE_API_KEY_TEST}"
STRIPE_PUBLIC_API_KEY_TEST: "${STRIPE_PUBLIC_API_KEY_TEST}"
STRIPE_API_KEY_PROD: "${STRIPE_API_KEY_PROD}"
STRIPE_PUBLIC_API_KEY_PROD: "${STRIPE_PUBLIC_API_KEY_PROD}"
STRIPE_ENDPOINT_SECRET: "${STRIPE_ENDPOINT_SECRET}"
REDIS_HOST: "${REDIS_HOST}"
# env_file:
# - stack.env
collectstatic:
image: xf/fisk:prod
container_name: collectstatic_fisk
build:
context: .
args:
OPERATION: ${OPERATION}
command: sh -c '. /venv/bin/activate && python manage.py collectstatic --noinput'
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${APP_DATABASE_FILE}:/conf/db.sqlite3
- fisk_static:${STATIC_ROOT}
# volumes_from:
# - tmp
# Dirty hack for Podman
environment:
APP_PORT: "${APP_PORT}"
PORTAINER_GIT_DIR: "${PORTAINER_GIT_DIR}"
APP_LOCAL_SETTINGS: "${APP_LOCAL_SETTINGS}"
APP_DATABASE_FILE: "${APP_DATABASE_FILE}"
DOMAIN: "${DOMAIN}"
URL: "${URL}"
ALLOWED_HOSTS: "${ALLOWED_HOSTS}"
NOTIFY_TOPIC: "${NOTIFY_TOPIC}"
CSRF_TRUSTED_ORIGINS: "${CSRF_TRUSTED_ORIGINS}"
DEBUG: "${DEBUG}"
SECRET_KEY: "${SECRET_KEY}"
STATIC_ROOT: "${STATIC_ROOT}"
REGISTRATION_OPEN: "${REGISTRATION_OPEN}"
OPERATION: "${OPERATION}"
ELASTICSEARCH_USERNAME: "${ELASTICSEARCH_USERNAME}"
ELASTICSEARCH_PASSWORD: "${ELASTICSEARCH_PASSWORD}"
ELASTICSEARCH_HOST: "${ELASTICSEARCH_HOST}"
ELASTICSEARCH_TLS: "${ELASTICSEARCH_TLS}"
TEST_ACCOUNT_NAME: "${TEST_ACCOUNT_NAME}"
TEST_ACCOUNT_EXCHANGE: "${TEST_ACCOUNT_EXCHANGE}"
TEST_ACCOUNT_API_KEY: "${TEST_ACCOUNT_API_KEY}"
TEST_ACCOUNT_API_SECRET: "${TEST_ACCOUNT_API_SECRET}"
PROFILER: "${PROFILER}"
BILLING_ENABLED: "${BILLING_ENABLED}"
LAGO_API_KEY: "${LAGO_API_KEY}"
LAGO_ORG_ID: "${LAGO_ORG_ID}"
LAGO_URL: "${LAGO_URL}"
STRIPE_TEST: "${STRIPE_TEST}"
STRIPE_API_KEY_TEST: "${STRIPE_API_KEY_TEST}"
STRIPE_PUBLIC_API_KEY_TEST: "${STRIPE_PUBLIC_API_KEY_TEST}"
STRIPE_API_KEY_PROD: "${STRIPE_API_KEY_PROD}"
STRIPE_PUBLIC_API_KEY_PROD: "${STRIPE_PUBLIC_API_KEY_PROD}"
STRIPE_ENDPOINT_SECRET: "${STRIPE_ENDPOINT_SECRET}"
REDIS_HOST: "${REDIS_HOST}"
# env_file:
# - stack.env
nginx:
image: nginx:latest
container_name: nginx_fisk
ports:
- ${APP_PORT}:9999
ulimits:
nproc: 65535
nofile:
soft: 65535
hard: 65535
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${PORTAINER_GIT_DIR}/docker/nginx/conf.d/${OPERATION}.conf:/etc/nginx/conf.d/default.conf
- fisk_static:${STATIC_ROOT}
# volumes_from:
# - tmp
networks:
- default
- xf
depends_on:
app:
condition: service_started
# volumes_from:
# - tmp
# depends_on:
# redis:
# condition: service_healthy
# tmp:
# image: busybox
# container_name: tmp_fisk
# command: chmod -R 777 /var/run/socks
# volumes:
# - /var/run/socks
# For caching
redis:
image: redis
container_name: redis_fisk
command: redis-server /etc/redis.conf
ulimits:
nproc: 65535
nofile:
soft: 65535
hard: 65535
volumes:
- ${PORTAINER_GIT_DIR}/docker/redis.conf:/etc/redis.conf
- fisk_redis_data:/data
# volumes_from:
# - tmp
healthcheck:
test: "redis-cli ping"
interval: 2s
timeout: 2s
retries: 15
# pyroscope:
# image: "pyroscope/pyroscope:latest"
# ports:
# - "4040:4040"
# command:
# - "server"
networks:
default:
driver: bridge
xf:
external: true
# db:
# external: true
volumes:
fisk_static: {}
fisk_redis_data: {}

View File

@ -1,9 +1,11 @@
version: "2.2" version: "2.2"
name: fisk_dev
services: services:
app: app_dev:
image: xf/fisk:prod image: xf/fisk:dev
container_name: fisk container_name: fisk_dev
build: build:
context: . context: .
args: args:
@ -12,28 +14,28 @@ services:
- ${PORTAINER_GIT_DIR}:/code - ${PORTAINER_GIT_DIR}:/code
- ${PORTAINER_GIT_DIR}/docker/uwsgi.ini:/conf/uwsgi.ini - ${PORTAINER_GIT_DIR}/docker/uwsgi.ini:/conf/uwsgi.ini
- ${APP_DATABASE_FILE}:/conf/db.sqlite3 - ${APP_DATABASE_FILE}:/conf/db.sqlite3
- fisk_static:${STATIC_ROOT} - fisk_static_dev:${STATIC_ROOT}
#ports: #ports:
# - "8000:8000" # uwsgi socket # - "8000:8000" # uwsgi socket
env_file: env_file:
- stack.env - stack.env
volumes_from: # volumes_from:
- tmp # - tmp_dev
depends_on: depends_on:
# redis: # redis:
# condition: service_healthy # condition: service_healthy
migration: migration_dev:
condition: service_started condition: service_started
collectstatic: collectstatic_dev:
condition: service_started condition: service_started
networks: networks:
- default - default
- xf - xf
- elastic # - db
scheduling: scheduling_dev:
image: xf/fisk:prod image: xf/fisk:dev
container_name: scheduling_fisk container_name: scheduling_fisk_dev
build: build:
context: . context: .
args: args:
@ -43,26 +45,26 @@ services:
- ${PORTAINER_GIT_DIR}:/code - ${PORTAINER_GIT_DIR}:/code
- ${PORTAINER_GIT_DIR}/docker/uwsgi.ini:/conf/uwsgi.ini - ${PORTAINER_GIT_DIR}/docker/uwsgi.ini:/conf/uwsgi.ini
- ${APP_DATABASE_FILE}:/conf/db.sqlite3 - ${APP_DATABASE_FILE}:/conf/db.sqlite3
- fisk_static:${STATIC_ROOT} - fisk_static_dev:${STATIC_ROOT}
env_file: env_file:
- stack.env - stack.env
volumes_from: # volumes_from:
- tmp # - tmp_dev
depends_on: depends_on:
redis: redis_dev:
condition: service_healthy condition: service_healthy
migration: migration_dev:
condition: service_started condition: service_started
collectstatic: collectstatic_dev:
condition: service_started condition: service_started
networks: networks:
- default - default
- xf - xf
- elastic # - db
migration: migration_dev:
image: xf/fisk:prod image: xf/fisk:dev
container_name: migration_fisk container_name: migration_fisk_dev
build: build:
context: . context: .
args: args:
@ -71,15 +73,15 @@ services:
volumes: volumes:
- ${PORTAINER_GIT_DIR}:/code - ${PORTAINER_GIT_DIR}:/code
- ${APP_DATABASE_FILE}:/conf/db.sqlite3 - ${APP_DATABASE_FILE}:/conf/db.sqlite3
- fisk_static:${STATIC_ROOT} - fisk_static_dev:${STATIC_ROOT}
volumes_from: # volumes_from:
- tmp # - tmp_dev
env_file: env_file:
- stack.env - stack.env
collectstatic: collectstatic_dev:
image: xf/fisk:prod image: xf/fisk:dev
container_name: collectstatic_fisk container_name: collectstatic_fisk_dev
build: build:
context: . context: .
args: args:
@ -88,15 +90,15 @@ services:
volumes: volumes:
- ${PORTAINER_GIT_DIR}:/code - ${PORTAINER_GIT_DIR}:/code
- ${APP_DATABASE_FILE}:/conf/db.sqlite3 - ${APP_DATABASE_FILE}:/conf/db.sqlite3
- fisk_static:${STATIC_ROOT} - fisk_static_dev:${STATIC_ROOT}
volumes_from: # volumes_from:
- tmp # - tmp_dev
env_file: env_file:
- stack.env - stack.env
nginx: nginx_dev:
image: nginx:latest image: nginx:latest
container_name: nginx_fisk container_name: nginx_fisk_dev
ports: ports:
- ${APP_PORT}:9999 - ${APP_PORT}:9999
ulimits: ulimits:
@ -107,14 +109,14 @@ services:
volumes: volumes:
- ${PORTAINER_GIT_DIR}:/code - ${PORTAINER_GIT_DIR}:/code
- ${PORTAINER_GIT_DIR}/docker/nginx/conf.d/${OPERATION}.conf:/etc/nginx/conf.d/default.conf - ${PORTAINER_GIT_DIR}/docker/nginx/conf.d/${OPERATION}.conf:/etc/nginx/conf.d/default.conf
- fisk_static:${STATIC_ROOT} - fisk_static_dev:${STATIC_ROOT}
volumes_from: # volumes_from:
- tmp # - tmp_dev
networks: networks:
- default - default
- xf - xf
depends_on: depends_on:
app: app_dev:
condition: service_started condition: service_started
@ -124,17 +126,17 @@ services:
# redis: # redis:
# condition: service_healthy # condition: service_healthy
tmp: # tmp_dev:
image: busybox # image: busybox
container_name: tmp_fisk # container_name: tmp_fisk_dev
command: chmod -R 777 /var/run/socks # command: chmod -R 777 /var/run/socks
volumes: # volumes:
- /var/run/socks # - /var/run/socks
# For caching # For caching
redis: redis_dev:
image: redis image: redis
container_name: redis_fisk container_name: redis_fisk_dev
command: redis-server /etc/redis.conf command: redis-server /etc/redis.conf
ulimits: ulimits:
nproc: 65535 nproc: 65535
@ -143,11 +145,11 @@ services:
hard: 65535 hard: 65535
volumes: volumes:
- ${PORTAINER_GIT_DIR}/docker/redis.conf:/etc/redis.conf - ${PORTAINER_GIT_DIR}/docker/redis.conf:/etc/redis.conf
- fisk_redis_data:/data - fisk_redis_data_dev:/data
volumes_from: # volumes_from:
- tmp # - tmp_dev
healthcheck: healthcheck:
test: "redis-cli -s /var/run/socks/redis.sock ping" test: "redis-cli ping"
interval: 2s interval: 2s
timeout: 2s timeout: 2s
retries: 15 retries: 15
@ -164,9 +166,9 @@ networks:
driver: bridge driver: bridge
xf: xf:
external: true external: true
elastic: # db:
external: true # external: true
volumes: volumes:
fisk_static: {} fisk_static_dev: {}
fisk_redis_data: {} fisk_redis_data_dev: {}

View File

@ -1,2 +1,4 @@
unixsocket /var/run/socks/redis.sock # unixsocket /var/run/socks/redis.sock
unixsocketperm 777 # unixsocketperm 777
port 6379
requirepass changeme

View File

@ -17,8 +17,12 @@ django-otp-yubikey
phonenumbers phonenumbers
qrcode qrcode
pydantic pydantic
# Alpaca
alpaca-py alpaca-py
# OANDA
oandapyV20 oandapyV20
# MEXC
pymexc
glom glom
elasticsearch elasticsearch
apscheduler apscheduler