WIP: do more post build steps

This commit is contained in:
Nils Bergmann 2021-12-07 16:29:26 +01:00
parent 6f2e306c7e
commit 50221fbd65
No known key found for this signature in database
GPG Key ID: 5FAEC08EE1DC2DF9
4 changed files with 293 additions and 13 deletions

View File

@@ -23,6 +23,7 @@ services:
dockerfile: ./docker/Dockerfile dockerfile: ./docker/Dockerfile
args: args:
- BUILD_THREADS=${BUILD_THREADS:-1} - BUILD_THREADS=${BUILD_THREADS:-1}
- BUILD_VERSION=${BUILD_VERSION:-171022}
environment: environment:
- DATABASE=${MARIADB_DATABASE:-darkflame} - DATABASE=${MARIADB_DATABASE:-darkflame}
- DATABASE_HOST=database - DATABASE_HOST=database

View File

@@ -1,12 +1,16 @@
FROM debian:11-slim FROM debian:11-slim as build
WORKDIR /build WORKDIR /build
RUN --mount=type=cache,target=/var/cache/apt \ RUN --mount=type=cache,target=/var/cache/apt \
echo "Install build dependencies" && \ echo "Install build dependencies" && \
apt update && \ apt update && \
apt install gcc cmake zlib1g-dev make build-essential g++ mariadb-client git python3 -yqq --no-install-recommends && \ apt install gcc cmake zlib1g-dev make build-essential g++ unzip ca-certificates wget -yqq --no-install-recommends && \
rm -rf /var/lib/apt/lists/* rm -rf /var/lib/apt/lists/* && \
update-ca-certificates && \
wget https://dev.mysql.com/get/Downloads/Connector-C++/libmysqlcppconn9_8.0.27-1debian11_amd64.deb -O /tmp/libmysqlcppconn.deb && \
dpkg -i /tmp/libmysqlcppconn.deb && \
rm /tmp/libmysqlcppconn.deb
COPY dAuthServer/ /build/dAuthServer COPY dAuthServer/ /build/dAuthServer
COPY dChatServer/ /build/dChatServer COPY dChatServer/ /build/dChatServer
@@ -27,18 +31,35 @@ COPY vanity /build/vanity
COPY .clang-* CMake* LICENSE /build/ COPY .clang-* CMake* LICENSE /build/
ARG BUILD_THREADS=1 ARG BUILD_THREADS=1
ARG BUILD_VERSION=171022
RUN echo "Build server" && \ RUN echo "Build server" && \
mkdir -p build && \ mkdir -p build && \
cd build && \ cd build && \
ls -lah && ls -lah ../ && cmake .. && \ sed -i -e "s/171023/${BUILD_VERSION}/g" ../CMakeVariables.txt && \
make -j $BUILD_THREADS && \ cmake .. && \
mkdir -p /app && \ make -j $BUILD_THREADS
cp -R ./* /app && \
rm -rf /build RUN mkdir -p /build/build/res/maps/navmeshes/ && \
unzip /build/resources/navmeshes.zip -d /build/build/res/maps
FROM debian:11-slim as runtime
WORKDIR /app WORKDIR /app
COPY --from=build /build/build /app
COPY --from=build /build/migrations /app/migrations
RUN --mount=type=cache,target=/var/cache/apt \
apt update && \
apt install mariadb-client python3 sqlite3 ca-certificates wget -yqq --no-install-recommends && \
rm -rf /var/lib/apt/lists/* && \
update-ca-certificates && \
wget https://dev.mysql.com/get/Downloads/Connector-C++/libmysqlcppconn9_8.0.27-1debian11_amd64.deb -O /tmp/libmysqlcppconn.deb && \
dpkg -i /tmp/libmysqlcppconn.deb && \
rm /tmp/libmysqlcppconn.deb
ADD docker/*.py /app/utils/ ADD docker/*.py /app/utils/
COPY docker/start_server.sh /start_server.sh COPY docker/start_server.sh /start_server.sh

217
docker/fdb_to_sqlite.py Normal file
View File

@@ -0,0 +1,217 @@
"""Module for converting a FDB database to a SQLite database"""
import argparse
import functools
import os
import sqlite3
import struct
from collections import OrderedDict
# Maps FDB column data-type codes to the SQLite column type names used when
# creating tables. Types 4 and 8 appear to behave identically, but the
# distinction is preserved in case it turns out to matter.
SQLITE_TYPE = {
    0: "none",
    1: "int32",
    3: "real",
    4: "text_4",
    5: "int_bool",
    6: "int64",
    8: "text_8",
}
def pointer_scope(func):
    """Decorator for methods that read pointer-addressed FDB structures.

    The FDB format stores many structures as 32-bit file offsets. The wrapper
    resolves the pointer (from the ``pointer`` keyword argument if given,
    otherwise by reading an int32 at the current file position), seeks there,
    calls *func*, and restores the original file position afterwards.

    A pointer value of -1 is a null pointer: the wrapped call is skipped and
    None is returned.
    """
    @functools.wraps(func)  # preserve func's name/docstring for debugging
    def wrapper(self, *args, **kwargs):
        pointer = kwargs.pop("pointer", None)
        if pointer is None:
            pointer = self._read_int32()
        if pointer == -1:
            return None
        current_pos = self.fdb.tell()
        self.fdb.seek(pointer)
        try:
            return func(self, *args, **kwargs)
        finally:
            # Restore the position even if func raises, so the stream is
            # never left pointing into an unrelated structure.
            self.fdb.seek(current_pos)
    return wrapper
# A class is used here so state such as the fdb handle and the sqlite
# connection can be shared between the reader methods without globals.
class convert:
    """Converter from an FDB database file to a SQLite database file.

    Instantiating the class performs the whole conversion: it opens the
    input, walks every FDB table structure, and writes equivalent tables
    and rows into the output SQLite file.
    """

    def __init__(self, in_file, out_file=None, add_link_info=False):
        """Open *in_file*, convert it, and write the result to *out_file*.

        in_file: path to the source .fdb file.
        out_file: path of the SQLite file to create; defaults to the input's
            basename with a ".sqlite" extension. An existing file at that
            path is deleted first so reruns start clean.
        add_link_info: when True, append three bookkeeping columns
            (_linked_from, _does_link, _invalid) to every table.
        """
        self.add_link_info = add_link_info
        if out_file is None:
            out_file = os.path.splitext(os.path.basename(in_file))[
                0] + ".sqlite"
        if os.path.exists(out_file):
            os.remove(out_file)
        self.fdb = open(in_file, "rb")
        try:
            self.sqlite = sqlite3.connect(out_file)
            try:
                self._read()
                print("-"*79)
                print("Finished converting database!")
                print("-"*79)
                # Commit only after a fully successful read.
                self.sqlite.commit()
            finally:
                # Close both handles even if conversion fails midway.
                self.sqlite.close()
        finally:
            self.fdb.close()

    def _read(self):
        """Entry point: read the table count, then every table."""
        number_of_tables = self._read_int32()
        self._read_tables(number_of_tables)

    @pointer_scope
    def _read_tables(self, number_of_tables):
        """Read each table's column header, then its rows."""
        for table_struct_index in range(number_of_tables):
            table_name, number_of_columns = self._read_column_header()
            print("[%2i%%] Reading table %s" %
                  (table_struct_index*100//number_of_tables, table_name))
            self._read_row_header(table_name, number_of_columns)

    @pointer_scope
    def _read_column_header(self):
        """Read one table's name and columns and create the SQLite table.

        Returns (table_name, column_count); the count includes the extra
        link-info columns when add_link_info is enabled.
        """
        number_of_columns = self._read_int32()
        table_name = self._read_string()
        columns = self._read_columns(number_of_columns)
        sql = "create table if not exists '%s' (%s)" % \
            (table_name, ", ".join(
                ["'%s' %s" % (col, SQLITE_TYPE[columns[col]]) for col in columns]))
        self.sqlite.execute(sql)
        return table_name, len(columns)

    @pointer_scope
    def _read_columns(self, number_of_columns):
        """Read the ordered (column name -> FDB type code) mapping."""
        columns = OrderedDict()
        for _ in range(number_of_columns):
            data_type = self._read_int32()
            name = self._read_string()
            columns[name] = data_type
        if self.add_link_info:
            # Bookkeeping columns: int32, int_bool, int_bool (see SQLITE_TYPE).
            columns["_linked_from"] = 1
            columns["_does_link"] = 5
            columns["_invalid"] = 5
        return columns

    @pointer_scope
    def _read_row_header(self, table_name, number_of_columns):
        """Read the row bucket count and insert every row of the table."""
        number_of_allocated_rows = self._read_int32()
        if number_of_allocated_rows != 0:
            # assert power of 2 allocation size
            assert number_of_allocated_rows & (
                number_of_allocated_rows - 1) == 0
            self.sqlite.executemany("insert into '%s' values (%s)" % (table_name, ", ".join(
                ["?"] * number_of_columns)), self._read_rows(number_of_allocated_rows, number_of_columns))

    @pointer_scope
    def _read_rows(self, number_of_allocated_rows, number_of_columns):
        """Generator yielding the value sequence for every row bucket."""
        rowid = 0
        percent_read = -1  # -1 so 0% is displayed as new
        for row in range(number_of_allocated_rows):
            new_percent_read = row*100//number_of_allocated_rows
            if new_percent_read > percent_read:
                percent_read = new_percent_read
                print("[%2i%%] Reading rows" % percent_read, end="\r")
            row_pointer = self._read_int32()
            if row_pointer == -1:
                if self.add_link_info:
                    # Empty bucket: emit a placeholder row flagged invalid.
                    yield (None,) * (number_of_columns-1) + (True,)
                rowid += 1
            else:
                linked_rows, rowid = self._read_row(rowid, pointer=row_pointer)
                for values in linked_rows:
                    yield values

    @pointer_scope
    def _read_row(self, rowid):
        """Follow one bucket's linked list of rows; return (rows, next rowid)."""
        rows = []
        linked_from = None
        while True:
            row_values = self._read_row_info()
            linked = self._read_int32()  # offset of the next linked row, or -1
            if self.add_link_info:
                row_values.append(linked_from)
                row_values.append(linked != -1)
                row_values.append(False)  # valid row
            rows.append(row_values)
            rowid += 1
            if linked == -1:
                break
            self.fdb.seek(linked)
            linked_from = rowid
        return rows, rowid

    @pointer_scope
    def _read_row_info(self):
        """Read one row: its column count, then its values."""
        number_of_columns = self._read_int32()
        return self._read_row_values(number_of_columns)

    @pointer_scope
    def _read_row_values(self, number_of_columns):
        """Decode number_of_columns (type code, value) pairs into a list."""
        values = []
        for _ in range(number_of_columns):
            data_type = self._read_int32()
            if data_type == 0:
                assert self.fdb.read(4) == b"\0\0\0\0"
                value = None
            elif data_type == 1:
                value = self._read_int32()
            elif data_type == 3:
                value = struct.unpack("f", self.fdb.read(4))[0]
            elif data_type in (4, 8):
                # Strings are stored as pointers; pointer_scope handles the seek.
                value = self._read_string()
            elif data_type == 5:
                value = struct.unpack("?xxx", self.fdb.read(4))[0]
            elif data_type == 6:
                value = self._read_int64()
            else:
                raise NotImplementedError(data_type)
            values.append(value)
        return values

    def _read_int32(self):
        """Read a signed 32-bit integer (native byte order) at the current position."""
        return struct.unpack("i", self.fdb.read(4))[0]

    @pointer_scope
    def _read_string(self):
        """Read a NUL-terminated latin-1 string at a pointed-to offset."""
        str_bytes = bytearray()
        while True:
            byte = self.fdb.read(1)
            if byte == b"\0":
                break
            str_bytes += byte
        return str_bytes.decode("latin1")

    @pointer_scope
    def _read_int64(self):
        """Read a signed 64-bit integer at a pointed-to offset."""
        return struct.unpack("q", self.fdb.read(8))[0]
if __name__ == "__main__":
    # Command-line entry point: convert the given .fdb file to SQLite.
    cli = argparse.ArgumentParser()
    cli.add_argument("fdb_path")
    cli.add_argument("--sqlite_path")
    cli.add_argument("--add_link_info", action="store_true")
    opts = cli.parse_args()
    convert(opts.fdb_path, opts.sqlite_path, opts.add_link_info)

View File

@@ -48,6 +48,40 @@ function update_ini_values() {
update_ini worldconfig.ini max_clients $MAX_CLIENTS update_ini worldconfig.ini max_clients $MAX_CLIENTS
} }
function symlink_client_files() {
ln -s /client/client/res/macros/ /app/res/macros
ln -s /client/client/res/BrickModels/ /app/res/BrickModels
ln -s /client/client/res/chatplus_en_us.txt /app/res/chatplus_en_us.txt
ln -s /client/client/res/names/ /app/res/names
ln -s /client/client/res/maps/ /app/res/maps
ln -s /client/client/locale/locale.xml /app/locale/locale.xml
}
function fdb_to_sqlite() {
echo "Run fdb_to_sqlite"
python3 /app/utils/fdb_to_sqlite.py /client/client/res/CDClient.fdb --sqlite_path /app/res/CDServer.sqlite
(
cd /app/migrations/cdserver
readarray -d '' entries < <(printf '%s\0' *.sql | sort -zV)
for entry in "${entries[@]}"; do
echo "Execute $entry"
sqlite3 ../../res/CDServer.sqlite < $entry
done
)
}
function run_db_migrations() {
(
cd /app/migrations/dlu
readarray -d '' entries < <(printf '%s\0' *.sql | sort -zV)
for entry in "${entries[@]}"; do
echo "Execute $entry"
mysql -h"$DATABASE_HOST" -P"$DATABASE_PORT" -u"$DATABASE_USER" -p"$DATABASE_PASSWORD" $DATABASE < $entry
done
)
}
set_defaults set_defaults
check_sql_connection check_sql_connection
@@ -64,14 +98,21 @@ if [[ ! -f "/client/extracted" ]]; then
echo "Start client resource extraction" echo "Start client resource extraction"
python3 /app/utils/pkextractor.py /client/ /client/ python3 /app/utils/pkextractor.py /client/ /client/
touch /client/extracted touch /client/extracted
else else
echo "Client already extracted. Skip this step" echo "Client already extracted. Skip this step"
echo "If you want to force re-extract, just delete the file called \"extracted\" in the client directory" echo "If you want to force re-extract, just delete the file called \"extracted\" in the client directory"
fi fi
while [[ 1 ]]; do symlink_client_files
sleep 1
echo "Hello" fdb_to_sqlite
done
run_db_migrations
echo "Start MasterServer"
./MasterServer
tail -f /dev/null