Mirror of https://github.com/DarkflameUniverse/DarkflameServer.git, synced 2024-11-09 09:48:20 +00:00

WIP: do more post build steps

This commit is contained in:
parent 6f2e306c7e
commit 50221fbd65
@@ -23,6 +23,7 @@ services:
      dockerfile: ./docker/Dockerfile
      args:
        - BUILD_THREADS=${BUILD_THREADS:-1}
        - BUILD_VERSION=${BUILD_VERSION:-171022}
    environment:
      - DATABASE=${MARIADB_DATABASE:-darkflame}
      - DATABASE_HOST=database
@@ -1,12 +1,16 @@
FROM debian:11-slim
FROM debian:11-slim as build

WORKDIR /build

RUN --mount=type=cache,target=/var/cache/apt \
    echo "Install build dependencies" && \
    apt update && \
    apt install gcc cmake zlib1g-dev make build-essential g++ mariadb-client git python3 -yqq --no-install-recommends && \
    rm -rf /var/lib/apt/lists/*
    apt install gcc cmake zlib1g-dev make build-essential g++ unzip ca-certificates wget -yqq --no-install-recommends && \
    rm -rf /var/lib/apt/lists/* && \
    update-ca-certificates && \
    wget https://dev.mysql.com/get/Downloads/Connector-C++/libmysqlcppconn9_8.0.27-1debian11_amd64.deb -O /tmp/libmysqlcppconn.deb && \
    dpkg -i /tmp/libmysqlcppconn.deb && \
    rm /tmp/libmysqlcppconn.deb

COPY dAuthServer/ /build/dAuthServer
COPY dChatServer/ /build/dChatServer
@@ -27,18 +31,35 @@ COPY vanity /build/vanity
COPY .clang-* CMake* LICENSE /build/

ARG BUILD_THREADS=1
ARG BUILD_VERSION=171022

RUN echo "Build server" && \
    mkdir -p build && \
    cd build && \
    ls -lah && ls -lah ../ && cmake .. && \
    make -j $BUILD_THREADS && \
    mkdir -p /app && \
    cp -R ./* /app && \
    rm -rf /build
    sed -i -e "s/171023/${BUILD_VERSION}/g" ../CMakeVariables.txt && \
    cmake .. && \
    make -j $BUILD_THREADS

RUN mkdir -p /build/build/res/maps/navmeshes/ && \
    unzip /build/resources/navmeshes.zip -d /build/build/res/maps

FROM debian:11-slim as runtime

WORKDIR /app

COPY --from=build /build/build /app

COPY --from=build /build/migrations /app/migrations

RUN --mount=type=cache,target=/var/cache/apt \
    apt update && \
    apt install mariadb-client python3 sqlite3 ca-certificates wget -yqq --no-install-recommends && \
    rm -rf /var/lib/apt/lists/* && \
    update-ca-certificates && \
    wget https://dev.mysql.com/get/Downloads/Connector-C++/libmysqlcppconn9_8.0.27-1debian11_amd64.deb -O /tmp/libmysqlcppconn.deb && \
    dpkg -i /tmp/libmysqlcppconn.deb && \
    rm /tmp/libmysqlcppconn.deb

ADD docker/*.py /app/utils/

COPY docker/start_server.sh /start_server.sh
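Editor's note: the sed call in the build stage above swaps the hard-coded 171023 default in CMakeVariables.txt for the BUILD_VERSION build argument before cmake runs. A minimal Python sketch of that same substitution, assuming it is run from the repository root; it is illustrative only and not part of the commit:

# Sketch of the version patch the build stage performs with sed.
# Assumes the current directory is the repository root and that
# BUILD_VERSION may be provided via the environment (default matches the compose file).
import os
from pathlib import Path

build_version = os.environ.get("BUILD_VERSION", "171022")
cmake_vars = Path("CMakeVariables.txt")
cmake_vars.write_text(cmake_vars.read_text().replace("171023", build_version))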
docker/fdb_to_sqlite.py (new file, 217 lines)
@@ -0,0 +1,217 @@
"""Module for converting a FDB database to a SQLite database"""
|
||||
import argparse
|
||||
import os
|
||||
import sqlite3
|
||||
import struct
|
||||
from collections import OrderedDict
|
||||
|
||||
# There seems to be no difference between 4 and 8, but just in case there is I'm keeping that type info
|
||||
SQLITE_TYPE = {}
|
||||
SQLITE_TYPE[0] = "none"
|
||||
SQLITE_TYPE[1] = "int32"
|
||||
SQLITE_TYPE[3] = "real"
|
||||
SQLITE_TYPE[4] = "text_4"
|
||||
SQLITE_TYPE[5] = "int_bool"
|
||||
SQLITE_TYPE[6] = "int64"
|
||||
SQLITE_TYPE[8] = "text_8"
|
||||
|
||||
|
||||
def pointer_scope(func):
|
||||
"""The FDB format has a lot of pointers to structures, so this decorator automatically reads the pointer, seeks to the pointer position, calls the function, and seeks back."""
|
||||
|
||||
def wrapper(self, *args, **kwargs):
|
||||
pointer = kwargs.get("pointer")
|
||||
|
||||
if pointer == None:
|
||||
pointer = self._read_int32()
|
||||
else:
|
||||
del kwargs["pointer"]
|
||||
|
||||
if pointer == -1:
|
||||
return
|
||||
|
||||
current_pos = self.fdb.tell()
|
||||
self.fdb.seek(pointer)
|
||||
|
||||
result = func(self, *args, **kwargs)
|
||||
|
||||
self.fdb.seek(current_pos)
|
||||
return result
|
||||
return wrapper
|
||||
|
||||
# I'm using a class for this to save things like the fdb and the sqlite without using globals
|
||||
|
||||
|
||||
class convert:
|
||||
def __init__(self, in_file, out_file=None, add_link_info=False):
|
||||
self.add_link_info = add_link_info
|
||||
if out_file == None:
|
||||
out_file = os.path.splitext(os.path.basename(in_file))[
|
||||
0] + ".sqlite"
|
||||
|
||||
if os.path.exists(out_file):
|
||||
os.remove(out_file)
|
||||
|
||||
self.fdb = open(in_file, "rb")
|
||||
self.sqlite = sqlite3.connect(out_file)
|
||||
|
||||
self._read()
|
||||
print("-"*79)
|
||||
print("Finished converting database!")
|
||||
print("-"*79)
|
||||
|
||||
self.sqlite.commit()
|
||||
self.sqlite.close()
|
||||
self.fdb.close()
|
||||
|
||||
def _read(self):
|
||||
number_of_tables = self._read_int32()
|
||||
self._read_tables(number_of_tables)
|
||||
|
||||
@pointer_scope
|
||||
def _read_tables(self, number_of_tables):
|
||||
for table_struct_index in range(number_of_tables):
|
||||
table_name, number_of_columns = self._read_column_header()
|
||||
print("[%2i%%] Reading table %s" %
|
||||
(table_struct_index*100//number_of_tables, table_name))
|
||||
self._read_row_header(table_name, number_of_columns)
|
||||
|
||||
@pointer_scope
|
||||
def _read_column_header(self):
|
||||
number_of_columns = self._read_int32()
|
||||
table_name = self._read_string()
|
||||
columns = self._read_columns(number_of_columns)
|
||||
|
||||
sql = "create table if not exists '%s' (%s)" % \
|
||||
(table_name, ", ".join(
|
||||
["'%s' %s" % (col, SQLITE_TYPE[columns[col]]) for col in columns]))
|
||||
|
||||
self.sqlite.execute(sql)
|
||||
return table_name, len(columns)
|
||||
|
||||
@pointer_scope
|
||||
def _read_columns(self, number_of_columns):
|
||||
columns = OrderedDict()
|
||||
|
||||
for _ in range(number_of_columns):
|
||||
data_type = self._read_int32()
|
||||
name = self._read_string()
|
||||
columns[name] = data_type
|
||||
|
||||
if self.add_link_info:
|
||||
columns["_linked_from"] = 1
|
||||
columns["_does_link"] = 5
|
||||
columns["_invalid"] = 5
|
||||
|
||||
return columns
|
||||
|
||||
@pointer_scope
|
||||
def _read_row_header(self, table_name, number_of_columns):
|
||||
number_of_allocated_rows = self._read_int32()
|
||||
if number_of_allocated_rows != 0:
|
||||
# assert power of 2 allocation size
|
||||
assert number_of_allocated_rows & (
|
||||
number_of_allocated_rows - 1) == 0
|
||||
|
||||
self.sqlite.executemany("insert into '%s' values (%s)" % (table_name, ", ".join(
|
||||
["?"] * number_of_columns)), self._read_rows(number_of_allocated_rows, number_of_columns))
|
||||
|
||||
@pointer_scope
|
||||
def _read_rows(self, number_of_allocated_rows, number_of_columns):
|
||||
rowid = 0
|
||||
percent_read = -1 # -1 so 0% is displayed as new
|
||||
for row in range(number_of_allocated_rows):
|
||||
new_percent_read = row*100//number_of_allocated_rows
|
||||
if new_percent_read > percent_read:
|
||||
percent_read = new_percent_read
|
||||
print("[%2i%%] Reading rows" % percent_read, end="\r")
|
||||
|
||||
row_pointer = self._read_int32()
|
||||
if row_pointer == -1:
|
||||
if self.add_link_info:
|
||||
# invalid row
|
||||
yield (None,) * (number_of_columns-1) + (True,)
|
||||
rowid += 1
|
||||
else:
|
||||
linked_rows, rowid = self._read_row(rowid, pointer=row_pointer)
|
||||
for values in linked_rows:
|
||||
yield values
|
||||
|
||||
@pointer_scope
|
||||
def _read_row(self, rowid):
|
||||
rows = []
|
||||
linked_from = None
|
||||
while True:
|
||||
row_values = self._read_row_info()
|
||||
linked = self._read_int32()
|
||||
if self.add_link_info:
|
||||
row_values.append(linked_from)
|
||||
row_values.append(linked != -1)
|
||||
row_values.append(False) # valid row
|
||||
rows.append(row_values)
|
||||
|
||||
rowid += 1
|
||||
|
||||
if linked == -1:
|
||||
break
|
||||
|
||||
self.fdb.seek(linked)
|
||||
linked_from = rowid
|
||||
|
||||
return rows, rowid
|
||||
|
||||
@pointer_scope
|
||||
def _read_row_info(self):
|
||||
number_of_columns = self._read_int32()
|
||||
return self._read_row_values(number_of_columns)
|
||||
|
||||
@pointer_scope
|
||||
def _read_row_values(self, number_of_columns):
|
||||
values = []
|
||||
|
||||
for _ in range(number_of_columns):
|
||||
data_type = self._read_int32()
|
||||
if data_type == 0:
|
||||
assert self.fdb.read(4) == b"\0\0\0\0"
|
||||
value = None
|
||||
elif data_type == 1:
|
||||
value = self._read_int32()
|
||||
elif data_type == 3:
|
||||
value = struct.unpack("f", self.fdb.read(4))[0]
|
||||
elif data_type in (4, 8):
|
||||
value = self._read_string()
|
||||
elif data_type == 5:
|
||||
value = struct.unpack("?xxx", self.fdb.read(4))[0]
|
||||
elif data_type == 6:
|
||||
value = self._read_int64()
|
||||
else:
|
||||
raise NotImplementedError(data_type)
|
||||
|
||||
values.append(value)
|
||||
return values
|
||||
|
||||
def _read_int32(self):
|
||||
return struct.unpack("i", self.fdb.read(4))[0]
|
||||
|
||||
@pointer_scope
|
||||
def _read_string(self):
|
||||
str_bytes = bytearray()
|
||||
while True:
|
||||
byte = self.fdb.read(1)
|
||||
if byte == b"\0":
|
||||
break
|
||||
str_bytes += byte
|
||||
return str_bytes.decode("latin1")
|
||||
|
||||
@pointer_scope
|
||||
def _read_int64(self):
|
||||
return struct.unpack("q", self.fdb.read(8))[0]
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("fdb_path")
|
||||
parser.add_argument("--sqlite_path")
|
||||
parser.add_argument("--add_link_info", action="store_true")
|
||||
args = parser.parse_args()
|
||||
convert(args.fdb_path, args.sqlite_path, args.add_link_info)
|
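Editor's note: a quick way to sanity-check the converter above is to run it and list the tables it produced. The sketch below is not part of the commit; the file paths are placeholders and the import assumes docker/fdb_to_sqlite.py is importable as fdb_to_sqlite from the current directory.

# Minimal smoke test for the converter above: convert an FDB file, then list
# the tables written into the resulting SQLite database. Paths are placeholders.
import sqlite3

from fdb_to_sqlite import convert  # assumes docker/fdb_to_sqlite.py is on sys.path

convert("CDClient.fdb", out_file="CDServer.sqlite")

with sqlite3.connect("CDServer.sqlite") as db:
    tables = [name for (name,) in db.execute(
        "select name from sqlite_master where type = 'table' order by name")]
    print("converted %i tables, e.g. %s" % (len(tables), tables[:5]))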
@@ -48,6 +48,40 @@ function update_ini_values() {
    update_ini worldconfig.ini max_clients $MAX_CLIENTS
}

function symlink_client_files() {
    ln -s /client/client/res/macros/ /app/res/macros
    ln -s /client/client/res/BrickModels/ /app/res/BrickModels
    ln -s /client/client/res/chatplus_en_us.txt /app/res/chatplus_en_us.txt
    ln -s /client/client/res/names/ /app/res/names
    ln -s /client/client/res/maps/ /app/res/maps
    ln -s /client/client/locale/locale.xml /app/locale/locale.xml
}

function fdb_to_sqlite() {
    echo "Run fdb_to_sqlite"
    python3 /app/utils/fdb_to_sqlite.py /client/client/res/CDClient.fdb --sqlite_path /app/res/CDServer.sqlite

    (
        cd /app/migrations/cdserver
        readarray -d '' entries < <(printf '%s\0' *.sql | sort -zV)
        for entry in "${entries[@]}"; do
            echo "Execute $entry"
            sqlite3 ../../res/CDServer.sqlite < $entry
        done
    )
}

function run_db_migrations() {
    (
        cd /app/migrations/dlu
        readarray -d '' entries < <(printf '%s\0' *.sql | sort -zV)
        for entry in "${entries[@]}"; do
            echo "Execute $entry"
            mysql -h"$DATABASE_HOST" -P"$DATABASE_PORT" -u"$DATABASE_USER" -p"$DATABASE_PASSWORD" $DATABASE < $entry
        done
    )
}

set_defaults

check_sql_connection
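Editor's note: both migration loops above rely on sort -zV so that numbered .sql files run in natural version order rather than plain lexicographic order. The sketch below mirrors the cdserver branch of that logic in Python; the paths, function names, and sort key are illustrative assumptions, not part of the commit.

# Rough Python equivalent of fdb_to_sqlite()'s migration loop: apply *.sql files
# to the converted SQLite database in natural (version-aware) order.
import re
import sqlite3
from pathlib import Path

def version_key(path: Path):
    # "10_foo.sql" should sort after "2_bar.sql", so compare numeric runs as integers
    return [int(p) if p.isdigit() else p for p in re.split(r"(\d+)", path.name)]

def apply_cd_migrations(db_path="/app/res/CDServer.sqlite",
                        migrations_dir="/app/migrations/cdserver"):
    with sqlite3.connect(db_path) as db:
        for script in sorted(Path(migrations_dir).glob("*.sql"), key=version_key):
            print("Execute", script.name)
            db.executescript(script.read_text())

# apply_cd_migrations()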
@@ -64,14 +98,21 @@ if [[ ! -f "/client/extracted" ]]; then
    echo "Start client resource extraction"

    python3 /app/utils/pkextractor.py /client/ /client/


    touch /client/extracted
else
    echo "Client already extracted. Skip this step"
    echo "If you want to force re-extract, just delete the file called \"extracted\" in the client directory"
fi

while [[ 1 ]]; do
    sleep 1
    echo "Hello"
done
symlink_client_files

fdb_to_sqlite

run_db_migrations

echo "Start MasterServer"

./MasterServer

tail -f /dev/null