diff --git a/.github/workflows/build-mod.yml b/.github/workflows/build-mod.yml index ff102b50..ca4d9c51 100644 --- a/.github/workflows/build-mod.yml +++ b/.github/workflows/build-mod.yml @@ -1,4 +1,4 @@ -name: Build Mod +name: "Build Mods" on: push: @@ -7,39 +7,20 @@ on: pull_request: paths: - "mod/**/*" + workflow_call: jobs: build: - runs-on: ubuntu-latest + runs-on: "ubuntu-latest" steps: - - uses: actions/checkout@v3 - - name: Set up JDK 17 - uses: actions/setup-java@v3 + - uses: "actions/checkout@v4" + + - name: "Setting up JDK 17" + uses: "actions/setup-java@v4" with: java-version: "17" distribution: "adopt" - - run: ./gradlew build - working-directory: ./mod - - - name: Upload Forge Build - uses: actions/upload-artifact@v3 - with: - name: Forge - path: mod/dist/*-forge.jar - - name: Upload Fabric Build - uses: actions/upload-artifact@v3 - with: - name: Fabric - path: mod/dist/*-fabric.jar - - - name: Release Tag - if: startsWith(github.ref, 'refs/tags/v') - uses: softprops/action-gh-release@v1 - with: - prerelease: true - fail_on_unmatched_files: true - files: | - mod/dist/*.jar - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: "Building mods" + working-directory: "./mod" + run: "./gradlew build" diff --git a/.github/workflows/build-server.yml b/.github/workflows/build-server.yml index eacd5eed..4b961e9b 100644 --- a/.github/workflows/build-server.yml +++ b/.github/workflows/build-server.yml @@ -1,4 +1,4 @@ -name: Build+Test Server +name: "Build+Test Server" on: push: @@ -7,23 +7,31 @@ on: pull_request: paths: - "server/**/*" + workflow_call: jobs: build: - runs-on: ubuntu-latest - strategy: - matrix: - version: ["lts/*", "latest"] + runs-on: "ubuntu-latest" steps: - - uses: actions/checkout@v3 - - name: Use latest Node.js LTS - uses: actions/setup-node@v3 + - uses: "actions/checkout@v4" + + - name: "Setting up Bun" + uses: oven-sh/setup-bun@v2 with: - node-version: ${{ matrix.version }} - # cache: "yarn" - - run: yarn - working-directory: ./server - - run: yarn build - working-directory: ./server - - run: yarn test - working-directory: ./server + bun-version: latest + + - name: "Installing dependencies" + working-directory: "./server" + run: "bun install" + + - name: "Checking types" + working-directory: "./server" + run: "bun run check:types" + + - name: "Checking style" + working-directory: "./server" + run: "bun run check:style" + + - name: "Running tests" + working-directory: "./server" + run: "bun run test" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 00000000..1566bcc5 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,35 @@ +name: "Publishing to release" + +on: + release: + types: + - "published" + +permissions: + contents: "write" + +jobs: + release-mod: + runs-on: "ubuntu-latest" + steps: + - uses: "actions/checkout@v4" + + - name: "Setting up JDK 17" + uses: "actions/setup-java@v4" + with: + java-version: "17" + distribution: "adopt" + + - name: "Building mods" + working-directory: "./mod" + run: "./gradlew build" + + - name: "Publishing mods" + working-directory: "./mod" + run: | + for file in $(find "dist/" -maxdepth 1 -type f -name "*.jar"); do + echo "Uploading $file" + gh release upload ${{ github.event.release.tag_name }} "$file" --clobber + done + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/Dockerfile b/Dockerfile index 7714d197..77e4b138 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,6 @@ # base is shared between build/test and deploy -FROM node:18-alpine AS base 
+# See options at: https://hub.docker.com/r/oven/bun +FROM oven/bun:latest AS base WORKDIR /usr/src/app/ @@ -8,29 +9,28 @@ COPY ./server/package.json /usr/src/app/package.json FROM base AS build -COPY ./server/yarn.lock /usr/src/app/yarn.lock -RUN yarn +COPY ./server/bun.lock /usr/src/app/bun.lock +COPY ./server/bunfig.toml /usr/src/app/bunfig.toml +RUN bun install # copy source as late as possible, to reuse docker cache with node_modules COPY ./server /usr/src/app -RUN yarn build - -FROM build AS test -RUN yarn test # final image only includes minimal files FROM base AS deploy +COPY --from=build /usr/src/app/bun.lock /usr/src/app/bun.lock +COPY --from=build /usr/src/app/bunfig.toml /usr/src/app/bunfig.toml COPY --from=build /usr/src/app/node_modules /usr/src/app/node_modules -COPY --from=build /usr/src/app/dist /usr/src/app/dist +COPY --from=build /usr/src/app/src /usr/src/app/src ENV NODE_ENV=production ENV HOST=0.0.0.0 #Mount your FS or volume or whatnot to this folder -RUN mkdir /data +# TODO: Fix env override of config data ENV MAPSYNC_DATA_DIR=/data -EXPOSE 12312/tcp +# EXPOSE 12312/tcp -CMD [ "yarn", "start" ] +CMD [ "bun", "run", "start" ] diff --git a/README.md b/README.md index 5533744f..857dafca 100644 --- a/README.md +++ b/README.md @@ -46,11 +46,10 @@ You can control who has access to a Sync Server by editing its `allowed-users.tx System Install
-- install recent nodejs (~17) +- install [Bun](https://bun.sh/) - clone code, `cd server` -- `npm install` -- `npm run build` -- this has to be run after every time the code is edited -- `npm run start` +- `bun install` +- `bun start` - to stop, press Ctrl+C twice diff --git a/mod/common/build.gradle b/mod/common/build.gradle index 999ab4b2..b40281c0 100644 --- a/mod/common/build.gradle +++ b/mod/common/build.gradle @@ -20,6 +20,9 @@ dependencies { modCompileOnly("maven.modrinth:journeymap:5JbcGXLn") // https://modrinth.com/mod/xaeros-minimap/version/23.6.2_Fabric_1.18.2 (23.6.2 fabric) modCompileOnly("maven.modrinth:xaeros-minimap:Jwydpps9") + + // https://github.com/TooTallNate/Java-WebSocket + compileOnly("org.java-websocket:Java-WebSocket:1.6.0") } tasks { diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/CatchupLogic.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/CatchupLogic.java index d75d001a..b71d134b 100644 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/CatchupLogic.java +++ b/mod/common/src/main/java/gjum/minecraft/mapsync/common/CatchupLogic.java @@ -32,7 +32,7 @@ public void addCatchupChunks(List catchupChunks) { if (catchupChunks.isEmpty()) return; var catchupDim = catchupChunks.get(0).dimension(); if (!dimensionState.dimension.equals(catchupDim)) { - logger.warn("Catchup chunks from wrong dimension " + catchupDim + ", expected " + dimensionState.dimension); + LOGGER.warn("Catchup chunks from wrong dimension " + catchupDim + ", expected " + dimensionState.dimension); return; } synchronized (this.catchupChunks) { diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/MapSyncMod.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/MapSyncMod.java index ce47224e..c67236d9 100644 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/MapSyncMod.java +++ b/mod/common/src/main/java/gjum/minecraft/mapsync/common/MapSyncMod.java @@ -4,13 +4,16 @@ import gjum.minecraft.mapsync.common.config.ModConfig; import gjum.minecraft.mapsync.common.config.ServerConfig; import gjum.minecraft.mapsync.common.data.*; +import gjum.minecraft.mapsync.common.net.SyncAddress; import gjum.minecraft.mapsync.common.net.SyncClient; import gjum.minecraft.mapsync.common.net.packet.*; +import java.util.stream.Collectors; import net.minecraft.client.KeyMapping; import net.minecraft.client.Minecraft; import net.minecraft.client.multiplayer.ServerData; import net.minecraft.network.protocol.game.ClientboundLoginPacket; import net.minecraft.network.protocol.game.ClientboundRespawnPacket; +import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.jetbrains.annotations.NotNull; @@ -18,7 +21,6 @@ import org.lwjgl.glfw.GLFW; import java.util.*; -import java.util.stream.Collectors; import static gjum.minecraft.mapsync.common.Cartography.chunkTileFromLevel; @@ -27,7 +29,7 @@ public abstract class MapSyncMod { private static final Minecraft mc = Minecraft.getInstance(); - public static final Logger logger = LogManager.getLogger(MapSyncMod.class); + public static final Logger LOGGER = LogManager.getLogger(MapSyncMod.class); private static MapSyncMod INSTANCE; @@ -123,30 +125,41 @@ public void handleRespawn(ClientboundRespawnPacket packet) { if (syncServerAddresses.isEmpty()) return shutDownSyncClients(); // will be filled with clients that are still wanted (address) and are still connected - var existingClients = new HashMap(); + var existingClients = new HashMap(); - 
for (SyncClient client : syncClients) { - if (client.isShutDown) continue; + for (final SyncClient client : this.syncClients) { + if (client.isShutDown) { + continue; + } // avoid reconnecting to same sync server, to keep shared state (expensive to resync) - if (!client.gameAddress.equals(serverConfig.gameAddress)) { - debugLog("Disconnecting sync client; different game server"); + if (!StringUtils.equals(client.gameAddress, serverConfig.gameAddress)) { + LOGGER.warn("Disconnecting sync client; different game server"); client.shutDown(); - } else if (!syncServerAddresses.contains(client.address)) { - debugLog("Disconnecting sync client; different sync address"); + } + else if (!syncServerAddresses.contains(client.syncAddress.toString())) { + LOGGER.warn("Disconnecting sync client; different sync address"); client.shutDown(); - } else { - existingClients.put(client.address, client); + } + else { + existingClients.put(client.syncAddress, client); } } - syncClients = syncServerAddresses.stream().map(address -> { - var client = existingClients.get(address); - if (client == null) client = new SyncClient(address, serverConfig.gameAddress); - client.autoReconnect = true; - return client; - }).collect(Collectors.toList()); - - return syncClients; + this.syncClients = syncServerAddresses.stream() + .map(SyncAddress::of) + .filter(Objects::nonNull) + .distinct() + .map((address) -> { + SyncClient client = existingClients.get(address); + if (client == null) { + client = new SyncClient(address, serverConfig.gameAddress); + } + client.autoReconnect = true; + return client; + }) + .collect(Collectors.toCollection(ArrayList::new)); + + return this.syncClients; } public List shutDownSyncClients() { @@ -214,11 +227,6 @@ public void handleMcChunkPartialChange(int cx, int cz) { // TODO update ChunkTile in a second or so; remember dimension in case it changes til then } - public void handleSyncServerEncryptionSuccess() { - debugLog("tcp encrypted"); - // TODO tell server our current dimension - } - public void handleRegionTimestamps(ClientboundRegionTimestampsPacket packet, SyncClient client) { DimensionState dimension = getDimensionState(); if (dimension == null) return; @@ -258,7 +266,7 @@ public void handleSharedChunk(ChunkTile chunkTile) { public void handleCatchupData(ClientboundChunkTimestampsResponsePacket packet) { var dimensionState = getDimensionState(); if (dimensionState == null) return; - debugLog("received catchup: " + packet.chunks.size() + " " + packet.chunks.get(0).syncClient.address); + debugLog("received catchup: " + packet.chunks.size() + " " + packet.chunks.get(0).syncClient.syncAddress); dimensionState.addCatchupChunks(packet.chunks); } @@ -269,9 +277,9 @@ public void requestCatchupData(List chunks) { } debugLog("requesting more catchup: " + chunks.size()); - var byServer = new HashMap>(); + var byServer = new HashMap>(); for (CatchupChunk chunk : chunks) { - var list = byServer.computeIfAbsent(chunk.syncClient.address, (a) -> new ArrayList<>()); + var list = byServer.computeIfAbsent(chunk.syncClient.syncAddress, (a) -> new ArrayList<>()); list.add(chunk); } for (List chunksForServer : byServer.values()) { @@ -283,7 +291,7 @@ public void requestCatchupData(List chunks) { public static void debugLog(String msg) { // we could also make use of slf4j's debug() but I don't know how to reconfigure that at runtime based on globalConfig if (modConfig.isShowDebugLog()) { - logger.info(msg); + LOGGER.info(msg); } } } diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/ModGui.java 
b/mod/common/src/main/java/gjum/minecraft/mapsync/common/ModGui.java index a81215c8..3703c17e 100644 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/ModGui.java +++ b/mod/common/src/main/java/gjum/minecraft/mapsync/common/ModGui.java @@ -2,15 +2,17 @@ import com.mojang.blaze3d.vertex.PoseStack; import gjum.minecraft.mapsync.common.config.ServerConfig; +import java.util.ArrayList; +import java.util.stream.Collectors; +import java.util.stream.Stream; import net.minecraft.client.Minecraft; import net.minecraft.client.gui.components.Button; import net.minecraft.client.gui.components.EditBox; import net.minecraft.client.gui.screens.Screen; import net.minecraft.network.chat.TextComponent; +import org.apache.commons.lang3.StringUtils; import org.jetbrains.annotations.NotNull; -import java.util.List; - import static gjum.minecraft.mapsync.common.MapSyncMod.getMod; public class ModGui extends Screen { @@ -78,8 +80,12 @@ protected void init() { public void connectClicked(Button btn) { try { if (syncServerAddressField == null) return; - var addresses = List.of(syncServerAddressField.getValue().split("[^-_.:A-Za-z0-9]+")); - serverConfig.setSyncServerAddresses(addresses); + serverConfig.setSyncServerAddresses( + Stream.of(StringUtils.split(syncServerAddressField.getValue(), ",")) + .map(String::trim) + .filter(StringUtils::isNotEmpty) + .collect(Collectors.toCollection(ArrayList::new)) + ); getMod().shutDownSyncClients(); getMod().getSyncClients(); btn.active = false; @@ -117,7 +123,7 @@ public void render(@NotNull PoseStack poseStack, int i, int j, float f) { for (var client : syncClients) { int statusColor; String statusText; - if (client.isEncrypted()) { + if (client.isEstablished()) { numConnected++; statusColor = 0x008800; statusText = "Connected"; @@ -128,7 +134,7 @@ public void render(@NotNull PoseStack poseStack, int i, int j, float f) { statusColor = 0xffffff; statusText = "Connecting..."; } - statusText = client.address + " " + statusText; + statusText = client.syncAddress + " " + statusText; drawString(poseStack, font, statusText, left, msgY, statusColor); msgY += 10; } diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/ClientHandler.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/ClientHandler.java deleted file mode 100644 index 37e2fa90..00000000 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/ClientHandler.java +++ /dev/null @@ -1,62 +0,0 @@ -package gjum.minecraft.mapsync.common.net; - -import gjum.minecraft.mapsync.common.data.CatchupChunk; -import gjum.minecraft.mapsync.common.net.packet.*; -import io.netty.channel.ChannelHandlerContext; -import io.netty.channel.ChannelInboundHandlerAdapter; - -import java.io.IOException; -import java.net.ConnectException; - -import static gjum.minecraft.mapsync.common.MapSyncMod.getMod; - -/** - * tightly coupled to {@link SyncClient} - */ -public class ClientHandler extends ChannelInboundHandlerAdapter { - private final SyncClient client; - - public ClientHandler(SyncClient client) { - this.client = client; - } - - @Override - public void channelRead(ChannelHandlerContext ctx, Object packet) { - try { - if (!client.isEncrypted()) { - if (packet instanceof ClientboundEncryptionRequestPacket pktEncryptionRequest) { - client.setUpEncryption(ctx, pktEncryptionRequest); - } else throw new Error("Expected encryption request, got " + packet); - } else if (packet instanceof ChunkTilePacket pktChunkTile) { - getMod().handleSharedChunk(pktChunkTile.chunkTile); - } else if (packet instanceof 
ClientboundRegionTimestampsPacket pktRegionTimestamps) { - getMod().handleRegionTimestamps(pktRegionTimestamps, client); - } else if (packet instanceof ClientboundChunkTimestampsResponsePacket pktCatchup) { - for (CatchupChunk chunk : pktCatchup.chunks) { - chunk.syncClient = this.client; - } - getMod().handleCatchupData((ClientboundChunkTimestampsResponsePacket) packet); - } else throw new Error("Expected packet, got " + packet); - } catch (Throwable err) { - err.printStackTrace(); - ctx.close(); - } - } - - @Override - public void exceptionCaught(ChannelHandlerContext ctx, Throwable err) throws Exception { - if (err instanceof IOException && "Connection reset by peer".equals(err.getMessage())) return; - if (err instanceof ConnectException && err.getMessage().startsWith("Connection refused: ")) return; - - SyncClient.logger.info("[map-sync] Network Error: " + err); - err.printStackTrace(); - ctx.close(); - super.exceptionCaught(ctx, err); - } - - @Override - public void channelInactive(ChannelHandlerContext ctx) throws Exception { - client.handleDisconnect(new RuntimeException("Channel inactive")); - super.channelInactive(ctx); - } -} diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/ClientboundPacketDecoder.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/ClientboundPacketDecoder.java deleted file mode 100644 index aac61eb0..00000000 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/ClientboundPacketDecoder.java +++ /dev/null @@ -1,37 +0,0 @@ -package gjum.minecraft.mapsync.common.net; - -import gjum.minecraft.mapsync.common.net.packet.*; -import io.netty.buffer.ByteBuf; -import io.netty.channel.ChannelHandlerContext; -import io.netty.handler.codec.ReplayingDecoder; -import org.jetbrains.annotations.Nullable; - -import java.util.List; - -public class ClientboundPacketDecoder extends ReplayingDecoder { - public static @Nullable Packet constructServerPacket(int id, ByteBuf buf) { - if (id == ChunkTilePacket.PACKET_ID) return ChunkTilePacket.read(buf); - if (id == ClientboundEncryptionRequestPacket.PACKET_ID) return ClientboundEncryptionRequestPacket.read(buf); - if (id == ClientboundChunkTimestampsResponsePacket.PACKET_ID) return ClientboundChunkTimestampsResponsePacket.read(buf); - if (id == ClientboundRegionTimestampsPacket.PACKET_ID) return ClientboundRegionTimestampsPacket.read(buf); - return null; - } - - @Override - protected void decode(ChannelHandlerContext ctx, ByteBuf buf, List out) { - try { - byte id = buf.readByte(); - final Packet packet = constructServerPacket(id, buf); - if (packet == null) { - SyncClient.logger.error("[ServerPacketDecoder] " + - "Unknown server packet id " + id + " 0x" + Integer.toHexString(id)); - ctx.close(); - return; - } - out.add(packet); - } catch (Throwable err) { - err.printStackTrace(); - ctx.close(); - } - } -} diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/Packet.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/Packet.java index c9672085..70cc5669 100644 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/Packet.java +++ b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/Packet.java @@ -81,4 +81,13 @@ static void writeResourceKey( resourceKey.location().toString() ); } + + static void assertNoRemainder( + final @NotNull ByteBuf in + ) { + final int remainder = in.readableBytes(); + if (remainder > 0) { + throw new IllegalStateException("Found [" + remainder + "] remaining bytes!"); + } + } } diff --git 
a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/ServerboundPacketEncoder.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/ServerboundPacketEncoder.java deleted file mode 100644 index 56f8b746..00000000 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/ServerboundPacketEncoder.java +++ /dev/null @@ -1,28 +0,0 @@ -package gjum.minecraft.mapsync.common.net; - -import gjum.minecraft.mapsync.common.net.packet.*; -import io.netty.buffer.ByteBuf; -import io.netty.channel.ChannelHandlerContext; -import io.netty.handler.codec.MessageToByteEncoder; - -public class ServerboundPacketEncoder extends MessageToByteEncoder { - public static int getClientPacketId(Packet packet) { - if (packet instanceof ChunkTilePacket) return ChunkTilePacket.PACKET_ID; - if (packet instanceof ServerboundHandshakePacket) return ServerboundHandshakePacket.PACKET_ID; - if (packet instanceof ServerboundEncryptionResponsePacket) return ServerboundEncryptionResponsePacket.PACKET_ID; - if (packet instanceof ServerboundCatchupRequestPacket) return ServerboundCatchupRequestPacket.PACKET_ID; - if (packet instanceof ServerboundChunkTimestampsRequestPacket) return ServerboundChunkTimestampsRequestPacket.PACKET_ID; - throw new IllegalArgumentException("Unknown client packet class " + packet); - } - - @Override - protected void encode(ChannelHandlerContext ctx, Packet packet, ByteBuf out) { - try { - out.writeByte(getClientPacketId(packet)); - packet.write(out); - } catch (Throwable err) { - err.printStackTrace(); - ctx.close(); - } - } -} diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/SyncAddress.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/SyncAddress.java new file mode 100644 index 00000000..66e5c24f --- /dev/null +++ b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/SyncAddress.java @@ -0,0 +1,69 @@ +package gjum.minecraft.mapsync.common.net; + +import java.net.URI; +import java.net.URISyntaxException; +import org.apache.http.client.utils.URIBuilder; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; + +public record SyncAddress( + @NotNull URI address +) { + public SyncAddress( + final @NotNull URI address + ) { + final var builder = new URIBuilder(); + + final String scheme = address.getScheme(); + if (scheme == null) { + throw new IllegalArgumentException("Must specify a scheme (ws/wss)!"); + } + builder.setScheme(switch (scheme) { + case "ws", "wss", "http", "https" -> scheme; + default -> throw new IllegalArgumentException("Only ws/wss is permitted!"); + }); + builder.setHost(address.getHost()); + builder.setPort(address.getPort()); + + try { + this.address = builder.build(); + } + catch (final URISyntaxException e) { + throw new IllegalArgumentException(e); + } + } + + @Override + public @NotNull String toString() { + return address().toString(); + } + + public static @Nullable SyncAddress of( + final URI syncAddress + ) { + if (syncAddress == null) { + return null; + } + try { + return new SyncAddress(syncAddress); + } + catch (final IllegalArgumentException e) { + return null; + } + } + + public static @Nullable SyncAddress of( + String syncAddress + ) { + if (syncAddress == null) { + return null; + } + syncAddress = syncAddress.trim(); + try { + return of(new URI(syncAddress)); + } + catch (final URISyntaxException e) { + return null; + } + } +} diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/SyncClient.java 
b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/SyncClient.java index 5c3f9d55..5e380a87 100644 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/SyncClient.java +++ b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/SyncClient.java @@ -2,36 +2,42 @@ import com.mojang.authlib.exceptions.AuthenticationException; import gjum.minecraft.mapsync.common.MapSyncMod; +import gjum.minecraft.mapsync.common.data.CatchupChunk; import gjum.minecraft.mapsync.common.data.ChunkTile; -import gjum.minecraft.mapsync.common.net.encryption.EncryptionDecoder; -import gjum.minecraft.mapsync.common.net.encryption.EncryptionEncoder; -import gjum.minecraft.mapsync.common.net.packet.*; +import gjum.minecraft.mapsync.common.net.packet.ChunkTilePacket; +import gjum.minecraft.mapsync.common.net.packet.ClientboundChunkTimestampsResponsePacket; +import gjum.minecraft.mapsync.common.net.packet.ClientboundAuthRequestPacket; +import gjum.minecraft.mapsync.common.net.packet.ClientboundRegionTimestampsPacket; +import gjum.minecraft.mapsync.common.net.packet.ClientboundWelcomePacket; +import gjum.minecraft.mapsync.common.net.packet.ServerboundCatchupRequestPacket; +import gjum.minecraft.mapsync.common.net.packet.ServerboundChunkTimestampsRequestPacket; +import gjum.minecraft.mapsync.common.net.packet.ServerboundAuthResponsePacket; +import gjum.minecraft.mapsync.common.net.packet.ServerboundHandshakePacket; import gjum.minecraft.mapsync.common.utils.Hasher; -import io.netty.bootstrap.Bootstrap; -import io.netty.channel.*; -import io.netty.channel.nio.NioEventLoopGroup; -import io.netty.channel.socket.SocketChannel; -import io.netty.channel.socket.nio.NioSocketChannel; -import io.netty.handler.codec.LengthFieldBasedFrameDecoder; -import io.netty.handler.codec.LengthFieldPrepender; +import gjum.minecraft.mapsync.common.utils.MagicValues; +import io.netty.buffer.ByteBuf; +import io.netty.buffer.Unpooled; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HexFormat; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.ThreadLocalRandom; import net.minecraft.client.Minecraft; import net.minecraft.client.User; import net.minecraft.world.level.ChunkPos; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; +import org.apache.commons.lang3.StringUtils; +import org.java_websocket.client.WebSocketClient; +import org.java_websocket.drafts.Draft_6455; +import org.java_websocket.enums.ReadyState; +import org.java_websocket.handshake.ServerHandshake; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; - -import javax.crypto.*; -import javax.crypto.spec.SecretKeySpec; -import java.security.*; -import java.util.*; -import java.util.concurrent.ThreadLocalRandom; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; - -import static gjum.minecraft.mapsync.common.MapSyncMod.debugLog; -import static gjum.minecraft.mapsync.common.MapSyncMod.getMod; +import org.jetbrains.annotations.UnknownNullability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * handles reconnection, authentication, encryption @@ -42,7 +48,7 @@ public class SyncClient { public synchronized void sendChunkTile(ChunkTile chunkTile) { var serverKnownHash = getServerKnownChunkHash(chunkTile.chunkPos()); if (Arrays.equals(chunkTile.dataHash(), serverKnownHash)) { - debugLog("server already has chunk (hash) " + chunkTile.chunkPos()); + 
MapSyncMod.debugLog("server already has chunk (hash) " + chunkTile.chunkPos()); return; // server already has this chunk } @@ -62,11 +68,10 @@ public synchronized void setServerKnownChunkHash(ChunkPos chunkPos, byte[] hash) // XXX end of hotfix - public static final Logger logger = LogManager.getLogger(SyncClient.class); - - public int retrySec = 5; + public static final Logger LOGGER = LoggerFactory.getLogger(SyncClient.class); + public static final int RESTART_DELAY = 5; - public final @NotNull String address; + public final @NotNull SyncAddress syncAddress; public final @NotNull String gameAddress; /** @@ -79,118 +84,158 @@ public synchronized void setServerKnownChunkHash(ChunkPos chunkPos, byte[] hash) * and disconnect when coming across this during a check */ public boolean isShutDown = false; - private boolean isEncrypted = false; private @Nullable String lastError; /** * limited (on insert) to 199 entries */ - private ArrayList queue = new ArrayList<>(); - private @Nullable Channel channel; - private static @Nullable NioEventLoopGroup workerGroup; - - public SyncClient(@NotNull String address, @NotNull String gameAddress) { - if (!address.contains(":")) address = address + ":12312"; - this.address = address; - this.gameAddress = gameAddress; - connect(); + private final ArrayList queue = new ArrayList<>(); + private final SyncConnection connection; + /** Whether the connection has survived the handshake and login exchange */ + private boolean isEstablished = false; + + public SyncClient( + final @NotNull SyncAddress syncAddress, + final @NotNull String gameAddress + ) { + this.syncAddress = Objects.requireNonNull(syncAddress); + this.gameAddress = Objects.requireNonNull(gameAddress); + this.connection = new SyncConnection(syncAddress); + this.connection.connect(); } - private void connect() { - try { - if (isShutDown) return; - - if (workerGroup != null && !workerGroup.isShuttingDown()) { - // end any tasks of the old connection - workerGroup.shutdownGracefully(); - } - workerGroup = new NioEventLoopGroup(); - isEncrypted = false; - - var bootstrap = new Bootstrap(); - bootstrap.group(workerGroup); - bootstrap.channel(NioSocketChannel.class); - bootstrap.option(ChannelOption.SO_KEEPALIVE, true); - bootstrap.handler(new ChannelInitializer() { - public void initChannel(SocketChannel ch) { - ch.pipeline().addLast( - new LengthFieldPrepender(4), - new LengthFieldBasedFrameDecoder(1 << 15, 0, 4, 0, 4), - new ClientboundPacketDecoder(), - new ServerboundPacketEncoder(), - new ClientHandler(SyncClient.this)); - } - }); - - String[] hostPortArr = address.split(":"); - int port = Integer.parseInt(hostPortArr[1]); - - final var channelFuture = bootstrap.connect(hostPortArr[0], port); - channel = channelFuture.channel(); - channelFuture.addListener(future -> { - if (future.isSuccess()) { - logger.info("[map-sync] Connected to " + address); - channelFuture.channel().writeAndFlush(new ServerboundHandshakePacket( - getMod().getVersion(), - Minecraft.getInstance().getUser().getName(), - gameAddress, - getMod().getDimensionState().dimension.location().toString())); - } else { - handleDisconnect(future.cause()); + private class SyncConnection extends WebSocketClient { + public SyncConnection( + final @NotNull SyncAddress serverUri + ) { + super( + serverUri.address(), + new Draft_6455( + List.of(), // plugins + List.of(), // protocols + MagicValues.MAX_WS_FRAME_SIZE + ) + ); + } + @Override + public void onOpen( + final @NotNull ServerHandshake handshake + ) { + LOGGER.info("[map-sync] OPENED!"); + 
INTERNAL_send(new ServerboundHandshakePacket( + MapSyncMod.getMod().getVersion(), + Minecraft.getInstance().getUser().getName(), + SyncClient.this.gameAddress, + MapSyncMod.getMod().getDimensionState().dimension.location().toString() + )); + } + @Override + public void onClose( + final int code, + final @UnknownNullability String reason, + final boolean remote + ) { + LOGGER.info("[map-sync] Closed!"); + SyncClient.this.handleDisconnect(code, reason, remote); + } + @Override + public void onError( + final @NotNull Exception thrown + ) { + LOGGER.warn("[map-sync] Something went wrong", thrown); + SyncClient.this.lastError = thrown.getMessage(); + close(); + } + @Override + public void onMessage( + final @NotNull String message + ) { + LOGGER.warn("[map-sync] Received a string message from the server!"); + SyncClient.this.lastError = "Server sent unsupported packets!"; + SyncClient.this.autoReconnect = false; + SyncClient.this.isShutDown = true; + SyncClient.this.isEstablished = false; + close(); + } + @Override + public void onMessage( + @NotNull ByteBuffer bytes + ) { + LOGGER.info("[map-sync] Received bytes!"); + final ByteBuf buf = Unpooled.wrappedBuffer(bytes); + try { + final byte packetId = buf.readByte(); + switch (packetId) { + case ChunkTilePacket.PACKET_ID -> { + final var packet = (ChunkTilePacket) ChunkTilePacket.read(buf); + Packet.assertNoRemainder(buf); + MapSyncMod.getMod().handleSharedChunk(packet.chunkTile); + } + case ClientboundAuthRequestPacket.PACKET_ID -> { + final ClientboundAuthRequestPacket packet = ClientboundAuthRequestPacket.read(buf); + Packet.assertNoRemainder(buf); + handleAuthRequest(this, packet); + } + case ClientboundWelcomePacket.PACKET_ID -> { + final ClientboundWelcomePacket packet = ClientboundWelcomePacket.read(buf); + Packet.assertNoRemainder(buf); + handleWelcome(packet); + } + case ClientboundChunkTimestampsResponsePacket.PACKET_ID -> { + final var packet = (ClientboundChunkTimestampsResponsePacket) ClientboundChunkTimestampsResponsePacket.read(buf); + Packet.assertNoRemainder(buf); + for (CatchupChunk chunk : packet.chunks) { + chunk.syncClient = SyncClient.this; + } + MapSyncMod.getMod().handleCatchupData(packet); + } + case ClientboundRegionTimestampsPacket.PACKET_ID -> { + final var packet = (ClientboundRegionTimestampsPacket) ClientboundRegionTimestampsPacket.read(buf); + Packet.assertNoRemainder(buf); + MapSyncMod.getMod().handleRegionTimestamps(packet, SyncClient.this); + } } - }); - } catch (Throwable e) { - e.printStackTrace(); - handleDisconnect(e); + } + catch (final Exception thrown) { + onError(thrown); + } } } - void handleDisconnect(Throwable err) { - isEncrypted = false; - - if (Minecraft.getInstance().level == null) shutDown(); - - String errMsg = err.getMessage(); - if (errMsg == null) errMsg = err.toString(); - lastError = errMsg; - if (isShutDown) { - logger.warn("[map-sync] Got disconnected from '" + address + "'." + - " Won't retry (has shut down)"); - if (!errMsg.contains("Channel inactive")) err.printStackTrace(); - } else if (!autoReconnect) { - logger.warn("[map-sync] Got disconnected from '" + address + "'." + - " Won't retry (autoReconnect=false)"); - if (!errMsg.contains("Channel inactive")) err.printStackTrace(); - } else if (workerGroup == null) { - logger.warn("[map-sync] Got disconnected from '" + address + "'." 
+ - " Won't retry (workerGroup=null)"); - err.printStackTrace(); - } else { - workerGroup.schedule(this::connect, retrySec, TimeUnit.SECONDS); - - if (!errMsg.startsWith("Connection refused: ")) { // reduce spam - logger.warn("[map-sync] Got disconnected from '" + address + "'." + - " Retrying in " + retrySec + " sec"); - if (!errMsg.contains("Channel inactive")) err.printStackTrace(); - } + public synchronized void connect() { + if (this.isShutDown) { + return; } + if (this.connection.getReadyState() == ReadyState.OPEN) { + this.connection.close(); + } + this.connection.connect(); } - public synchronized void handleEncryptionSuccess() { - if (channel == null) return; + private void handleDisconnect( + final int code, + final @UnknownNullability String reason, + final boolean remote + ) { + this.isEstablished = false; + + if (Minecraft.getInstance().level == null) { + this.isShutDown = true; + } + + if (StringUtils.isNotEmpty(reason)) { + this.lastError = reason; + } - lastError = null; - isEncrypted = true; - getMod().handleSyncServerEncryptionSuccess(); + LOGGER.warn("[map-sync] Got disconnected from '{}': {}", this.syncAddress, this.lastError); - for (Packet packet : queue) { - channel.write(packet); + if (!this.isShutDown && this.autoReconnect && !remote) { + // TODO: Readd auto-reconnect + // workerGroup.schedule(this::connect, retrySec, TimeUnit.SECONDS); } - queue.clear(); - channel.flush(); } - public boolean isEncrypted() { - return isEncrypted; + public boolean isEstablished() { + return this.isEstablished; } public String getError() { @@ -200,92 +245,90 @@ public String getError() { /** * Send if encrypted, or queue and send once encryption is set up. */ - public void send(Packet packet) { - send(packet, true); - } - - /** - * Send if encrypted, or queue and send once encryption is set up. 
- */ - public synchronized void send(Packet packet, boolean flush) { - try { - if (isEncrypted() && channel != null && channel.isActive()) { - if (flush) channel.writeAndFlush(packet); - else channel.write(packet); - } else { - queue.add(packet); - // don't let the queue occupy too much memory - if (queue.size() > 200) { - logger.warn("[map-sync] Dropping 100 oldest packets from queue"); - queue = queue.stream() - .skip(100) - .collect(Collectors.toCollection(ArrayList::new)); - } + public synchronized void send(Packet packet) { + if (this.connection == null || this.connection.getReadyState() != ReadyState.OPEN) { + this.queue.add(packet); + final int queueSize = this.queue.size(); + if (queueSize > 200) { + final List slice = List.copyOf(this.queue.subList(100, queueSize)); + this.queue.clear(); + this.queue.addAll(slice); } - } catch (Throwable e) { - e.printStackTrace(); + return; } + INTERNAL_send(packet); + } + + private void INTERNAL_send( + final @NotNull Packet packet + ) { + final ByteBuf buf = Unpooled.buffer(); + buf.writeByte(getClientPacketId(packet)); + packet.write(buf); + + final byte[] bytes = new byte[buf.readableBytes()]; + buf.readBytes(bytes); + + this.connection.send(bytes); + } + + private static int getClientPacketId(Packet packet) { + if (packet instanceof ChunkTilePacket) return ChunkTilePacket.PACKET_ID; + if (packet instanceof ServerboundHandshakePacket) return ServerboundHandshakePacket.PACKET_ID; + if (packet instanceof ServerboundAuthResponsePacket) return ServerboundAuthResponsePacket.PACKET_ID; + if (packet instanceof ServerboundCatchupRequestPacket) return ServerboundCatchupRequestPacket.PACKET_ID; + if (packet instanceof ServerboundChunkTimestampsRequestPacket) return ServerboundChunkTimestampsRequestPacket.PACKET_ID; + throw new IllegalArgumentException("Unknown client packet class " + packet); } public synchronized void shutDown() { - isShutDown = true; - if (channel != null) { - channel.disconnect(); - channel.eventLoop().shutdownGracefully(); - channel = null; - } - if (workerGroup != null && !workerGroup.isShuttingDown()) { - // this also stops any ongoing reconnect timeout - workerGroup.shutdownGracefully(); - workerGroup = null; - } + this.isShutDown = true; + this.isEstablished = false; + this.connection.close(); } - void setUpEncryption(ChannelHandlerContext ctx, ClientboundEncryptionRequestPacket packet) { + private void handleAuthRequest( + final @NotNull WebSocketClient connection, + final @NotNull ClientboundAuthRequestPacket packet + ) { + final var clientSecret = new byte[Long.BYTES]; + ThreadLocalRandom.current().nextBytes(clientSecret); + + // note that this is different from minecraft (we get no negative hashes) + final String shaHex = HexFormat.of().formatHex(Hasher.sha1() + .update(clientSecret) + .update(packet.serverSecret()) + .generateHash() + ); + + final User session = Minecraft.getInstance().getUser(); try { - byte[] sharedSecret = new byte[16]; - ThreadLocalRandom.current().nextBytes(sharedSecret); - - if (!MapSyncMod.getMod().isDevMode()) { - // note that this is different from minecraft (we get no negative hashes) - final String shaHex = HexFormat.of().formatHex(Hasher.sha1() - .update(sharedSecret) - .update(packet.publicKey.getEncoded()) - .generateHash() - ); - - final User session = Minecraft.getInstance().getUser(); - Minecraft.getInstance().getMinecraftSessionService().joinServer( - session.getGameProfile(), - session.getAccessToken(), - shaHex - ); - } + Minecraft.getInstance().getMinecraftSessionService().joinServer( 
+ session.getGameProfile(), + session.getAccessToken(), + shaHex + ); + } + catch (final AuthenticationException authenticationFailure) { + LOGGER.warn("Failed authentication check!"); + connection.close(); + return; + } - try { - ctx.channel().writeAndFlush(new ServerboundEncryptionResponsePacket( - encrypt(packet.publicKey, sharedSecret), - encrypt(packet.publicKey, packet.verifyToken))); - } catch (NoSuchAlgorithmException | InvalidKeyException | NoSuchPaddingException | BadPaddingException | - IllegalBlockSizeException e) { - shutDown(); - throw new RuntimeException(e); - } + INTERNAL_send(new ServerboundAuthResponsePacket( + clientSecret + )); + } - SecretKey secretKey = new SecretKeySpec(sharedSecret, "AES"); - ctx.pipeline() - .addFirst("encrypt", new EncryptionEncoder(secretKey)) - .addFirst("decrypt", new EncryptionDecoder(secretKey)); + private synchronized void handleWelcome( + final @NotNull ClientboundWelcomePacket packet + ) { + this.isEstablished = true; + this.lastError = null; - handleEncryptionSuccess(); - } catch (AuthenticationException e) { - SyncClient.logger.warn("Auth error: " + e.getMessage(), e); + for (final Packet pendingPacket : List.copyOf(this.queue)) { + INTERNAL_send(pendingPacket); } - } - - private static byte[] encrypt(PublicKey key, byte[] data) throws NoSuchPaddingException, NoSuchAlgorithmException, BadPaddingException, IllegalBlockSizeException, InvalidKeyException { - Cipher cipher = Cipher.getInstance("RSA/ECB/PKCS1Padding"); - cipher.init(Cipher.ENCRYPT_MODE, key); - return cipher.doFinal(data); + this.queue.clear(); } } diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/encryption/EncryptionDecoder.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/encryption/EncryptionDecoder.java deleted file mode 100644 index d456051a..00000000 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/encryption/EncryptionDecoder.java +++ /dev/null @@ -1,31 +0,0 @@ -package gjum.minecraft.mapsync.common.net.encryption; - -import io.netty.buffer.ByteBuf; -import io.netty.channel.ChannelHandlerContext; -import io.netty.handler.codec.MessageToMessageDecoder; - -import javax.crypto.Cipher; -import javax.crypto.ShortBufferException; -import javax.crypto.spec.IvParameterSpec; -import java.security.GeneralSecurityException; -import java.security.Key; -import java.util.List; - -public class EncryptionDecoder extends MessageToMessageDecoder { - private final EncryptionTranslator decryptionCodec; - - public EncryptionDecoder(Key key) { - try { - Cipher cipher = Cipher.getInstance("AES/CFB8/NoPadding"); - cipher.init(Cipher.DECRYPT_MODE, key, new IvParameterSpec(key.getEncoded())); - decryptionCodec = new EncryptionTranslator(cipher); - } catch (GeneralSecurityException e) { - throw new RuntimeException(e); - } - } - - @Override - protected void decode(ChannelHandlerContext ctx, ByteBuf in, List out) throws ShortBufferException { - out.add(decryptionCodec.decipher(ctx, in)); - } -} diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/encryption/EncryptionEncoder.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/encryption/EncryptionEncoder.java deleted file mode 100644 index ef59d71f..00000000 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/encryption/EncryptionEncoder.java +++ /dev/null @@ -1,30 +0,0 @@ -package gjum.minecraft.mapsync.common.net.encryption; - -import io.netty.buffer.ByteBuf; -import io.netty.channel.ChannelHandlerContext; -import 
io.netty.handler.codec.MessageToByteEncoder; - -import javax.crypto.Cipher; -import javax.crypto.ShortBufferException; -import javax.crypto.spec.IvParameterSpec; -import java.security.GeneralSecurityException; -import java.security.Key; - -public class EncryptionEncoder extends MessageToByteEncoder { - private final EncryptionTranslator encryptionCodec; - - public EncryptionEncoder(Key key) { - try { - Cipher cipher = Cipher.getInstance("AES/CFB8/NoPadding"); - cipher.init(Cipher.ENCRYPT_MODE, key, new IvParameterSpec(key.getEncoded())); - encryptionCodec = new EncryptionTranslator(cipher); - } catch (GeneralSecurityException e) { - throw new RuntimeException(e); - } - } - - @Override - protected void encode(ChannelHandlerContext ctx, ByteBuf in, ByteBuf out) throws ShortBufferException { - encryptionCodec.encipher(in, out); - } -} diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/encryption/EncryptionTranslator.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/encryption/EncryptionTranslator.java deleted file mode 100644 index 080afeca..00000000 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/encryption/EncryptionTranslator.java +++ /dev/null @@ -1,48 +0,0 @@ -package gjum.minecraft.mapsync.common.net.encryption; - -import io.netty.buffer.ByteBuf; -import io.netty.channel.ChannelHandlerContext; - -import javax.crypto.Cipher; -import javax.crypto.ShortBufferException; - -public class EncryptionTranslator { - private final Cipher cipher; - private byte[] inputBuffer = new byte[0]; - private byte[] outputBuffer = new byte[0]; - - protected EncryptionTranslator(Cipher cipher) { - this.cipher = cipher; - } - - private byte[] bufToBytes(ByteBuf buf) { - int i = buf.readableBytes(); - - if (this.inputBuffer.length < i) { - this.inputBuffer = new byte[i]; - } - - buf.readBytes(this.inputBuffer, 0, i); - return this.inputBuffer; - } - - protected ByteBuf decipher(ChannelHandlerContext ctx, ByteBuf buffer) throws ShortBufferException { - int i = buffer.readableBytes(); - byte[] bytes = this.bufToBytes(buffer); - ByteBuf bytebuf = ctx.alloc().heapBuffer(this.cipher.getOutputSize(i)); - bytebuf.writerIndex(this.cipher.update(bytes, 0, i, bytebuf.array(), bytebuf.arrayOffset())); - return bytebuf; - } - - protected void encipher(ByteBuf in, ByteBuf out) throws ShortBufferException { - int i = in.readableBytes(); - byte[] bytes = this.bufToBytes(in); - int j = this.cipher.getOutputSize(i); - - if (this.outputBuffer.length < j) { - this.outputBuffer = new byte[j]; - } - - out.writeBytes(this.outputBuffer, 0, this.cipher.update(bytes, 0, i, this.outputBuffer)); - } -} diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ClientboundAuthRequestPacket.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ClientboundAuthRequestPacket.java new file mode 100644 index 00000000..bb5c5170 --- /dev/null +++ b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ClientboundAuthRequestPacket.java @@ -0,0 +1,28 @@ +package gjum.minecraft.mapsync.common.net.packet; + +import gjum.minecraft.mapsync.common.net.Packet; +import io.netty.buffer.ByteBuf; +import java.util.Objects; +import org.jetbrains.annotations.NotNull; + +/** + * You will receive this in response to {@link ServerboundHandshakePacket}, and + * will expect a {@link ServerboundAuthResponsePacket} in response. 
+ */ +public record ClientboundAuthRequestPacket( + byte @NotNull [] serverSecret +) implements Packet { + public static final int PACKET_ID = 2; + + public ClientboundAuthRequestPacket { + Objects.requireNonNull(serverSecret); + } + + public static ClientboundAuthRequestPacket read( + final @NotNull ByteBuf buf + ) { + return new ClientboundAuthRequestPacket( + Packet.readIntLengthByteArray(buf) + ); + } +} diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ClientboundEncryptionRequestPacket.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ClientboundEncryptionRequestPacket.java deleted file mode 100644 index 67ff10cc..00000000 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ClientboundEncryptionRequestPacket.java +++ /dev/null @@ -1,42 +0,0 @@ -package gjum.minecraft.mapsync.common.net.packet; - -import gjum.minecraft.mapsync.common.net.Packet; -import io.netty.buffer.ByteBuf; -import org.jetbrains.annotations.NotNull; - -import java.security.*; -import java.security.spec.InvalidKeySpecException; -import java.security.spec.X509EncodedKeySpec; - -/** - * You will receive this in response to {@link ServerboundHandshakePacket}, and - * will expect a {@link ServerboundEncryptionResponsePacket} in response. - */ -public class ClientboundEncryptionRequestPacket implements Packet { - public static final int PACKET_ID = 2; - - public final @NotNull PublicKey publicKey; - public final byte @NotNull [] verifyToken; - - public ClientboundEncryptionRequestPacket(@NotNull PublicKey publicKey, byte @NotNull [] verifyToken) { - this.publicKey = publicKey; - this.verifyToken = verifyToken; - } - - public static Packet read(ByteBuf buf) { - return new ClientboundEncryptionRequestPacket( - readKey(buf), - Packet.readIntLengthByteArray(buf)); - } - - protected static PublicKey readKey(ByteBuf in) { - try { - byte[] encodedKey = Packet.readIntLengthByteArray(in); - X509EncodedKeySpec keySpec = new X509EncodedKeySpec(encodedKey); - KeyFactory keyFactory = KeyFactory.getInstance("RSA"); - return keyFactory.generatePublic(keySpec); - } catch (NoSuchAlgorithmException | InvalidKeySpecException e) { - throw new RuntimeException(e); - } - } -} diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ClientboundWelcomePacket.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ClientboundWelcomePacket.java new file mode 100644 index 00000000..1c533bbb --- /dev/null +++ b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ClientboundWelcomePacket.java @@ -0,0 +1,19 @@ +package gjum.minecraft.mapsync.common.net.packet; + +import gjum.minecraft.mapsync.common.net.Packet; +import io.netty.buffer.ByteBuf; +import org.jetbrains.annotations.NotNull; + +/** + * You will receive this in response to {@link ServerboundHandshakePacket}, and + * will expect a {@link ServerboundAuthResponsePacket} in response. 
+ */ +public record ClientboundWelcomePacket() implements Packet { + public static final int PACKET_ID = 9; + + public static ClientboundWelcomePacket read( + final @NotNull ByteBuf buf + ) { + return new ClientboundWelcomePacket(); + } +} diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ServerboundAuthResponsePacket.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ServerboundAuthResponsePacket.java new file mode 100644 index 00000000..7850d022 --- /dev/null +++ b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ServerboundAuthResponsePacket.java @@ -0,0 +1,30 @@ +package gjum.minecraft.mapsync.common.net.packet; + +import gjum.minecraft.mapsync.common.net.Packet; +import io.netty.buffer.ByteBuf; +import java.util.Objects; +import org.jetbrains.annotations.NotNull; + +/** + * This is sent to the server in response to a {@link ClientboundAuthRequestPacket}, + * after which, if the connection persists, you are considered authenticated + * with the server. You should then receive a {@link ClientboundRegionTimestampsPacket}. + * + * @param clientSecret encrypted with server's public key + */ +public record ServerboundAuthResponsePacket( + byte @NotNull [] clientSecret +) implements Packet { + public static final int PACKET_ID = 3; + + public ServerboundAuthResponsePacket { + Objects.requireNonNull(clientSecret); + } + + @Override + public void write( + final @NotNull ByteBuf out + ) { + Packet.writeIntLengthByteArray(out, clientSecret()); + } +} diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ServerboundEncryptionResponsePacket.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ServerboundEncryptionResponsePacket.java deleted file mode 100644 index e769f4c4..00000000 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ServerboundEncryptionResponsePacket.java +++ /dev/null @@ -1,34 +0,0 @@ -package gjum.minecraft.mapsync.common.net.packet; - -import gjum.minecraft.mapsync.common.net.Packet; -import io.netty.buffer.ByteBuf; -import org.jetbrains.annotations.NotNull; - -/** - * This is sent to the server in response to a {@link ClientboundEncryptionRequestPacket}, - * after which, if the connection persists, you are considered authenticated - * with the server. You should then receive a {@link ClientboundRegionTimestampsPacket}. 
- */ -public class ServerboundEncryptionResponsePacket implements Packet { - public static final int PACKET_ID = 3; - - /** - * encrypted with server's public key - */ - public final byte[] sharedSecret; - /** - * encrypted with server's public key - */ - public final byte[] verifyToken; - - public ServerboundEncryptionResponsePacket(byte[] sharedSecret, byte[] verifyToken) { - this.sharedSecret = sharedSecret; - this.verifyToken = verifyToken; - } - - @Override - public void write(@NotNull ByteBuf out) { - Packet.writeIntLengthByteArray(out, sharedSecret); - Packet.writeIntLengthByteArray(out, verifyToken); - } -} diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ServerboundHandshakePacket.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ServerboundHandshakePacket.java index e1bbc895..a5bccfb4 100644 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ServerboundHandshakePacket.java +++ b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ServerboundHandshakePacket.java @@ -6,7 +6,7 @@ /** * This should be sent to the server IMMEDIATELY upon connection. If the - * server accepts the connection, you will receive a {@link ClientboundEncryptionRequestPacket}. + * server accepts the connection, you will receive a {@link ClientboundAuthRequestPacket}. */ public class ServerboundHandshakePacket implements Packet { public static final int PACKET_ID = 1; diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/utils/MagicValues.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/utils/MagicValues.java index d365ba6a..66109e38 100644 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/utils/MagicValues.java +++ b/mod/common/src/main/java/gjum/minecraft/mapsync/common/utils/MagicValues.java @@ -4,4 +4,8 @@ public final class MagicValues { // SHA1 produces 160-bit (20-byte) hashes // https://en.wikipedia.org/wiki/SHA-1 public static final int SHA1_HASH_LENGTH = 20; + + // Sets the maximum frame length as the maximum 16-bit unsigned int value + // https://datatracker.ietf.org/doc/html/rfc6455#section-5.2 + public static final int MAX_WS_FRAME_SIZE = (1 << 16) - 1; } diff --git a/mod/common/src/main/resources/default-config.json b/mod/common/src/main/resources/default-config.json index c86f8511..054e88e0 100644 --- a/mod/common/src/main/resources/default-config.json +++ b/mod/common/src/main/resources/default-config.json @@ -2,7 +2,7 @@ "servers": { "localhost:25565": { "syncServerAddresses": [ - "localhost:12312" + "ws://localhost:12312" ] } } diff --git a/mod/fabric/build.gradle b/mod/fabric/build.gradle index 955500fd..57aea303 100644 --- a/mod/fabric/build.gradle +++ b/mod/fabric/build.gradle @@ -26,6 +26,9 @@ dependencies { // https://modrinth.com/mod/modmenu/version/3.2.5 (3.2.5 fabric) modCompileOnly("maven.modrinth:modmenu:nVxObSbX") + + // https://github.com/TooTallNate/Java-WebSocket + include("org.java-websocket:Java-WebSocket:1.6.0") } processResources { diff --git a/mod/forge/build.gradle b/mod/forge/build.gradle index 24ccefbd..82bf3ab9 100644 --- a/mod/forge/build.gradle +++ b/mod/forge/build.gradle @@ -26,6 +26,9 @@ dependencies { common(project(path: ":common", configuration: "namedElements")) { transitive false } shadowCommon(project(path: ":common", configuration: "transformProductionForge")) { transitive false } + + // https://github.com/TooTallNate/Java-WebSocket + include("org.java-websocket:Java-WebSocket:1.6.0") } processResources { diff --git 
a/server/.gitignore b/server/.gitignore index 892bc841..0aaad063 100644 --- a/server/.gitignore +++ b/server/.gitignore @@ -1,5 +1,146 @@ -node_modules/ -/dist/ -*.sqlite -*.pem /mapsync/ + +# Created by https://www.toptal.com/developers/gitignore/api/node +# Edit at https://www.toptal.com/developers/gitignore?templates=node + +### Node ### +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +lerna-debug.log* +.pnpm-debug.log* + +# Diagnostic reports (https://nodejs.org/api/report.html) +report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json + +# Runtime data +pids +*.pid +*.seed +*.pid.lock + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage +*.lcov + +# nyc test coverage +.nyc_output + +# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# Bower dependency directory (https://bower.io/) +bower_components + +# node-waf configuration +.lock-wscript + +# Compiled binary addons (https://nodejs.org/api/addons.html) +build/Release + +# Dependency directories +node_modules/ +jspm_packages/ + +# Snowpack dependency directory (https://snowpack.dev/) +web_modules/ + +# TypeScript cache +*.tsbuildinfo + +# Optional npm cache directory +.npm + +# Optional eslint cache +.eslintcache + +# Optional stylelint cache +.stylelintcache + +# Microbundle cache +.rpt2_cache/ +.rts2_cache_cjs/ +.rts2_cache_es/ +.rts2_cache_umd/ + +# Optional REPL history +.node_repl_history + +# Output of 'npm pack' +*.tgz + +# Yarn Integrity file +.yarn-integrity + +# dotenv environment variable files +.env +.env.development.local +.env.test.local +.env.production.local +.env.local + +# parcel-bundler cache (https://parceljs.org/) +.cache +.parcel-cache + +# Next.js build output +.next +out + +# Nuxt.js build / generate output +.nuxt +dist + +# Gatsby files +.cache/ +# Comment in the public line in if your project uses Gatsby and not Next.js +# https://nextjs.org/blog/next-9-1#public-directory-support +# public + +# vuepress build output +.vuepress/dist + +# vuepress v2.x temp and cache directory +.temp + +# Docusaurus cache and generated files +.docusaurus + +# Serverless directories +.serverless/ + +# FuseBox cache +.fusebox/ + +# DynamoDB Local files +.dynamodb/ + +# TernJS port file +.tern-port + +# Stores VSCode versions used for testing VSCode extensions +.vscode-test + +# yarn v2 +.yarn/cache +.yarn/unplugged +.yarn/build-state.yml +.yarn/install-state.gz +.pnp.* + +### Node Patch ### +# Serverless Webpack directories +.webpack/ + +# Optional stylelint cache + +# SvelteKit build / generate output +.svelte-kit + +# End of https://www.toptal.com/developers/gitignore/api/node diff --git a/server/bun.lock b/server/bun.lock new file mode 100644 index 00000000..664dd14b --- /dev/null +++ b/server/bun.lock @@ -0,0 +1,42 @@ +{ + "lockfileVersion": 1, + "workspaces": { + "": { + "name": "civmap-server", + "dependencies": { + "async-mutex": "^0.4.0", + "kysely": "^0.28.2", + "kysely-bun-sqlite": "^0.4.0", + "zod": "^3.25.57", + }, + "devDependencies": { + "@types/bun": "^1.2.15", + "prettier": "^3.0.1", + "typescript": "^5.8.3", + }, + }, + }, + "packages": { + "@types/bun": ["@types/bun@1.2.15", "", { "dependencies": { "bun-types": "1.2.15" } }, "sha512-U1ljPdBEphF0nw1MIk0hI7kPg7dFdPyM7EenHsp6W5loNHl7zqy6JQf/RKCgnUn2KDzUpkBwHPnEJEjII594bA=="], + + "@types/node": ["@types/node@24.0.0", "", { "dependencies": { "undici-types": "~7.8.0" } }, 
"sha512-yZQa2zm87aRVcqDyH5+4Hv9KYgSdgwX1rFnGvpbzMaC7YAljmhBET93TPiTd3ObwTL+gSpIzPKg5BqVxdCvxKg=="], + + "async-mutex": ["async-mutex@0.4.1", "", { "dependencies": { "tslib": "^2.4.0" } }, "sha512-WfoBo4E/TbCX1G95XTjbWTE3X2XLG0m1Xbv2cwOtuPdyH9CZvnaA5nCt1ucjaKEgW2A5IF71hxrRhr83Je5xjA=="], + + "bun-types": ["bun-types@1.2.15", "", { "dependencies": { "@types/node": "*" } }, "sha512-NarRIaS+iOaQU1JPfyKhZm4AsUOrwUOqRNHY0XxI8GI8jYxiLXLcdjYMG9UKS+fwWasc1uw1htV9AX24dD+p4w=="], + + "kysely": ["kysely@0.28.2", "", {}, "sha512-4YAVLoF0Sf0UTqlhgQMFU9iQECdah7n+13ANkiuVfRvlK+uI0Etbgd7bVP36dKlG+NXWbhGua8vnGt+sdhvT7A=="], + + "kysely-bun-sqlite": ["kysely-bun-sqlite@0.4.0", "", { "dependencies": { "bun-types": "^1.1.31" }, "peerDependencies": { "kysely": "^0.28.2" } }, "sha512-2EkQE5sT4ewiw7IWfJsAkpxJ/QPVKXKO5sRYI/xjjJIJlECuOdtG+ssYM0twZJySrdrmuildNPFYVreyu1EdZg=="], + + "prettier": ["prettier@3.5.3", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw=="], + + "tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], + + "typescript": ["typescript@5.8.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ=="], + + "undici-types": ["undici-types@7.8.0", "", {}, "sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw=="], + + "zod": ["zod@3.25.57", "", {}, "sha512-6tgzLuwVST5oLUxXTmBqoinKMd3JeesgbgseXeFasKKj8Q1FCZrHnbqJOyiEvr4cVAlbug+CgIsmJ8cl/pU5FA=="], + } +} diff --git a/server/bunfig.toml b/server/bunfig.toml new file mode 100644 index 00000000..7d1ca567 --- /dev/null +++ b/server/bunfig.toml @@ -0,0 +1,4 @@ +telemetry = false + +[install] +saveTextLockfile = true diff --git a/server/package.json b/server/package.json index 516626f6..d2b39488 100644 --- a/server/package.json +++ b/server/package.json @@ -4,27 +4,27 @@ "private": true, "author": "Gjum", "license": "GPL-3.0-only", + "type": "module", + "module": "src/main.ts", "scripts": { - "build": "tsc", - "format": "prettier -w .", - "test": "true", - "start": "node -r source-map-support/register dist/main.js", - "start:dev": "tsc && node --inspect -r source-map-support/register dist/main.js" + "check:types": "bunx --bun tsc --noEmit --checkJs", + "check:style": "bunx --bun prettier --check .", + "format": "bunx --bun prettier -w .", + "test": "bun test ./src/**/*.test.ts", + "start": "bun src/main.ts", + "start:dev": "bun --inspect src/main.ts", + "compile": "bun build --compile . 
--outfile out/mapsync-server" }, "dependencies": { "async-mutex": "^0.4.0", - "better-sqlite3": "^9.5.0", - "kysely": "^0.26.1", - "source-map-support": "^0.5.21", - "zod": "^3.21.4", - "zod-validation-error": "^1.3.1" + "kysely": "^0.28.2", + "kysely-bun-sqlite": "^0.4.0", + "zod": "^3.25.57" }, "devDependencies": { - "@types/better-sqlite3": "^7.6.4", - "@types/node": "^18.17.4", - "dotenv": "^16.0.1", + "@types/bun": "^1.2.15", "prettier": "^3.0.1", - "typescript": "^5.1.6" + "typescript": "^5.8.3" }, "prettier": { "useTabs": false, diff --git a/server/src/Renderer.ts b/server/src/Renderer.ts index 3d6f603b..00d4f6e7 100644 --- a/server/src/Renderer.ts +++ b/server/src/Renderer.ts @@ -1,8 +1,9 @@ -import { spawn } from "child_process"; -import { promisify } from "util"; -import * as database from "./database"; +import { spawn } from "node:child_process"; +import { promisify } from "node:util"; +import type DatabaseConnection from "./db/database.ts"; export async function renderTile( + database: DatabaseConnection, dimension: string, tileX: number, tileZ: number, @@ -25,8 +26,8 @@ export async function renderTile( const chunkHeaderBuf = Buffer.allocUnsafe(4 + 4 + 2); // reused. 32+32+16 bit for (const chunk of allChunks) { - chunkHeaderBuf.writeInt32BE(chunk.chunk_x, 0); - chunkHeaderBuf.writeInt32BE(chunk.chunk_z, 4); + chunkHeaderBuf.writeInt32BE(chunk.chunkX, 0); + chunkHeaderBuf.writeInt32BE(chunk.chunkZ, 4); chunkHeaderBuf.writeUInt16BE(chunk.version, 8); await write(chunkHeaderBuf); await write(chunk.data); diff --git a/server/src/cli.ts b/server/src/cli.ts index 81ccf1f5..51fd5c23 100644 --- a/server/src/cli.ts +++ b/server/src/cli.ts @@ -1,15 +1,15 @@ -import lib_readline from "readline"; -import lib_stream from "stream"; +import node_readline from "node:readline"; +import node_stream from "node:stream"; -import * as metadata from "./metadata"; +import * as metadata from "./metadata.ts"; //idk where these come from lol interface TerminalExtras { - output: lib_stream.Writable; + output: node_stream.Writable; _refreshLine(): void; } -type TermType = lib_readline.Interface & TerminalExtras; -const term = lib_readline.createInterface({ +type TermType = node_readline.Interface & TerminalExtras; +const term = node_readline.createInterface({ input: process.stdin, output: process.stdout, }) as TermType; @@ -21,8 +21,8 @@ if (!("MAPSYNC_DUMB_TERM" in process.env)) { var newStdout = Object.create(oldStdout); var oldStderr = process.stderr; var newStderr = Object.create(oldStdout); - function write_func(outout: lib_stream.Writable) { - return function (this: lib_stream.Writable) { + function write_func(outout: node_stream.Writable) { + return function (this: node_stream.Writable) { term.output.write("\x1b[2K\r"); var result = outout.write.apply( this, diff --git a/server/src/constants.ts b/server/src/constants.ts index 94161821..1b872b93 100644 --- a/server/src/constants.ts +++ b/server/src/constants.ts @@ -6,3 +6,10 @@ export const SUPPORTED_VERSIONS = new Set([ // SHA1 produces 160-bit (20-byte) hashes // https://en.wikipedia.org/wiki/SHA-1 export const SHA1_HASH_LENGTH = 20; + +export const UUID_REGEX = + /^(........)-?(....)-?(....)-?(....)-?(............)$/; + +// Sets the maximum frame length as the maximum 16-bit unsigned int value +// https://datatracker.ietf.org/doc/html/rfc6455#section-5.2 +export const MAX_WS_FRAME_LENGTH = (1 << 16) - 1; diff --git a/server/src/database.ts b/server/src/database.ts deleted file mode 100644 index 0b073c05..00000000 --- a/server/src/database.ts +++ 
/dev/null @@ -1,219 +0,0 @@ -import * as kysely from "kysely"; -import sqlite from "better-sqlite3"; -import { DATA_FOLDER } from "./metadata"; -import { type Pos2D } from "./model"; - -let database: kysely.Kysely | null = null; - -export interface Database { - chunk_data: { - hash: Buffer; - version: number; - data: Buffer; - }; - player_chunk: { - world: string; - chunk_x: number; - chunk_z: number; - uuid: string; - ts: number; - hash: Buffer; - }; -} - -export function get() { - if (!database) { - database = new kysely.Kysely({ - dialect: new kysely.SqliteDialect({ - database: async () => - sqlite( - process.env["SQLITE_PATH"] ?? - `${DATA_FOLDER}/db.sqlite`, - {}, - ), - }), - }); - } - return database; -} - -export async function setup() { - await get() - .schema.createTable("chunk_data") - .ifNotExists() - .addColumn("hash", "blob", (col) => col.notNull().primaryKey()) - .addColumn("version", "integer", (col) => col.notNull()) - .addColumn("data", "blob", (col) => col.notNull()) - .execute(); - await get() - .schema.createTable("player_chunk") - .ifNotExists() - .addColumn("world", "text", (col) => col.notNull()) - .addColumn("chunk_x", "integer", (col) => col.notNull()) - .addColumn("chunk_z", "integer", (col) => col.notNull()) - .addColumn("uuid", "text", (col) => col.notNull()) - .addColumn("ts", "bigint", (col) => col.notNull()) - .addColumn("hash", "blob", (col) => col.notNull()) - .addPrimaryKeyConstraint("PK_coords_and_player", [ - "world", - "chunk_x", - "chunk_z", - "uuid", - ]) - .addForeignKeyConstraint( - "FK_chunk_ref", - ["hash"], - "chunk_data", - ["hash"], - (fk) => fk.onUpdate("no action").onDelete("no action"), - ) - .execute(); -} - -/** - * Converts the entire database of player chunks into regions, with each region - * having the highest (aka newest) timestamp. - */ -export function getRegionTimestamps(dimension: string) { - // computing region coordinates in SQL requires truncating, not rounding - return get() - .selectFrom("player_chunk") - .select([ - (eb) => - kysely.sql`floor(${eb.ref("chunk_x")} / 32.0)`.as( - "regionX", - ), - (eb) => - kysely.sql`floor(${eb.ref("chunk_z")} / 32.0)`.as( - "regionZ", - ), - (eb) => eb.fn.max("ts").as("timestamp"), - ]) - .where("world", "=", dimension) - .groupBy(["regionX", "regionZ"]) - .orderBy("regionX", "desc") - .execute(); -} - -/** - * Converts an array of region coords into an array of timestamped chunk coords. - */ -export async function getChunkTimestamps(dimension: string, regions: Pos2D[]) { - return get() - .with("regions", (db) => - db - .selectFrom("player_chunk") - .select([ - (eb) => - kysely.sql`(cast(floor(${eb.ref( - "chunk_x", - )} / 32.0) as int) || '_' || cast(floor(${eb.ref( - "chunk_z", - )} / 32.0) as int))`.as("region"), - "chunk_x as x", - "chunk_z as z", - (eb) => eb.fn.max("ts").as("timestamp"), - ]) - .where("world", "=", dimension) - .groupBy(["x", "z"]), - ) - .selectFrom("regions") - .select(["x as chunkX", "z as chunkZ", "timestamp"]) - .where( - "region", - "in", - regions.map((region) => region.x + "_" + region.z), - ) - .orderBy("timestamp", "desc") - .execute(); -} - -/** - * Retrieves the data for a given chunk's world, x, z, and timestamp. - * - * TODO: May want to consider making world, x, z, and timestamp a unique in the - * database table... may help performance. 
- */ -export async function getChunkData( - dimension: string, - chunkX: number, - chunkZ: number, -) { - return get() - .selectFrom("player_chunk") - .innerJoin("chunk_data", "chunk_data.hash", "player_chunk.hash") - .select([ - "chunk_data.hash as hash", - "chunk_data.version as version", - "chunk_data.data as data", - "player_chunk.ts as ts", - ]) - .where("player_chunk.world", "=", dimension) - .where("player_chunk.chunk_x", "=", chunkX) - .where("player_chunk.chunk_z", "=", chunkZ) - .orderBy("player_chunk.ts", "desc") - .limit(1) - .executeTakeFirst(); -} - -/** - * Stores a player's chunk data. - */ -export async function storeChunkData( - dimension: string, - chunkX: number, - chunkZ: number, - uuid: string, - timestamp: number, - version: number, - hash: Buffer, - data: Buffer, -) { - await get() - .insertInto("chunk_data") - .values({ hash, version, data }) - .onConflict((oc) => oc.column("hash").doNothing()) - .execute(); - await get() - .replaceInto("player_chunk") - .values({ - world: dimension, - chunk_x: chunkX, - chunk_z: chunkZ, - uuid, - ts: timestamp, - hash, - }) - .execute(); -} - -/** - * Gets all the [latest] chunks within a region. - */ -export async function getRegionChunks( - dimension: string, - regionX: number, - regionZ: number, -) { - const minChunkX = regionX << 4, - maxChunkX = minChunkX + 16; - const minChunkZ = regionZ << 4, - maxChunkZ = minChunkZ + 16; - return get() - .selectFrom("player_chunk") - .innerJoin("chunk_data", "chunk_data.hash", "player_chunk.hash") - .select([ - "player_chunk.chunk_x as chunk_x", - "player_chunk.chunk_z as chunk_z", - (eb) => eb.fn.max("player_chunk.ts").as("timestamp"), - "chunk_data.version as version", - "chunk_data.data as data", - ]) - .where("player_chunk.world", "=", dimension) - .where("player_chunk.chunk_x", ">=", minChunkX) - .where("player_chunk.chunk_x", "<", maxChunkX) - .where("player_chunk.chunk_z", ">=", minChunkZ) - .where("player_chunk.chunk_z", "<", maxChunkZ) - .orderBy("player_chunk.ts", "desc") - .execute(); -} diff --git a/server/src/db/database.test.ts b/server/src/db/database.test.ts new file mode 100644 index 00000000..dc3620a5 --- /dev/null +++ b/server/src/db/database.test.ts @@ -0,0 +1,8 @@ +import { test } from "bun:test"; + +import DatabaseConnection from "./database.ts"; + +test("testMigrations", async () => { + const database = new DatabaseConnection(":memory:"); + await database.setup(); +}); diff --git a/server/src/db/database.ts b/server/src/db/database.ts new file mode 100644 index 00000000..22d2c1cc --- /dev/null +++ b/server/src/db/database.ts @@ -0,0 +1,197 @@ +import { Database as BunSqliteDatabase } from "bun:sqlite"; + +import { Kysely, type Generated, Migrator } from "kysely"; +import { BunSqliteDialect } from "kysely-bun-sqlite"; + +import Migrations from "./migrations.ts"; +import { type Pos2D } from "../model.ts"; + +export default class DatabaseConnection { + public readonly internal: Kysely<{ + chunk_data: { + hash: Buffer; + version: number; + data: Buffer; + }; + player_chunk: { + world: string; + chunk_x: number; + chunk_z: number; + gen_region_x: Generated; + gen_region_z: Generated; + gen_region_coord: Generated; + uuid: string; + ts: number; + hash: Buffer; + }; + }>; + + /** + * See {@link BunSqliteDatabase}'s constructor and {@link https://bun.sh/docs/api/sqlite Bun SQLite} for + * documentation on what this function can accept. 
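+     *
+     * For example (mirroring database.test.ts and main.ts; a sketch of typical
+     * call sites, not additional API surface):
+     *   new DatabaseConnection(":memory:")
+     *   new DatabaseConnection("./mapsync/db.sqlite", { create: true, readwrite: true })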
+ */ + public constructor( + ...args: ConstructorParameters + ) { + this.internal = new Kysely({ + dialect: new BunSqliteDialect({ + database: new BunSqliteDatabase(...args), + }), + }); + } + + public getMigrations(): Migrator { + return new Migrator({ + db: this.internal, + provider: new Migrations(), + }); + } + + /** Convenience function to migrate to latest */ + public async setup() { + const results = await this.getMigrations().migrateToLatest(); + for (const result of results.results ?? []) { + switch (result.status) { + case "Success": + console.info( + `Migration [${result.migrationName}] applied!`, + ); + break; + case "Error": + console.error( + `Migration [${result.migrationName}] failed to apply!`, + ); + break; + case "NotExecuted": + console.warn( + `Migration [${result.migrationName}] was not applied!`, + ); + break; + } + } + if (results.error) { + throw results.error; + } + } + + /** Gets the timestamps for ALL regions stored. */ + async getRegionTimestamps(dimension: string) { + return await this.internal + .selectFrom("player_chunk") + .select([ + "gen_region_x as regionX", + "gen_region_z as regionZ", + (eb) => eb.fn.max("ts").as("timestamp"), + ]) + .where("world", "=", dimension) + .groupBy(["regionX", "regionZ"]) + .orderBy("timestamp", "asc") + .execute(); + } + + public async getChunkTimestamps(dimension: string, regions: Pos2D[]) { + return await this.internal + .selectFrom("player_chunk") + .select([ + "chunk_x as chunkX", + "chunk_z as chunkZ", + (eb) => eb.fn.max("ts").as("timestamp"), + ]) + .where( + "gen_region_coord", + "in", + regions.map((region) => region.x + "_" + region.z), + ) + .where("world", "=", dimension) + .groupBy(["chunkX", "chunkZ"]) + .orderBy("timestamp", "desc") + .execute(); + } + + /** + * Retrieves the data for a given chunk's world, x, z, and timestamp. + * + * TODO: May want to consider making world, x, z, and timestamp a unique in + * the database table... may help performance. + */ + public async getChunkData( + dimension: string, + chunkX: number, + chunkZ: number, + ) { + return await this.internal + .selectFrom("player_chunk") + .innerJoin("chunk_data", "chunk_data.hash", "player_chunk.hash") + .select([ + "chunk_data.hash as hash", + "chunk_data.version as version", + "chunk_data.data as data", + "player_chunk.ts as ts", + ]) + .where("player_chunk.world", "=", dimension) + .where("player_chunk.chunk_x", "=", chunkX) + .where("player_chunk.chunk_z", "=", chunkZ) + .orderBy("player_chunk.ts", "desc") + .limit(1) + .executeTakeFirst(); + } + + /** + * Stores a player's chunk data. + */ + public async storeChunkData( + dimension: string, + chunkX: number, + chunkZ: number, + uuid: string, + timestamp: number, + version: number, + hash: Buffer, + data: Buffer, + ) { + await this.internal.transaction().execute(async (transaction) => { + await transaction + .insertInto("chunk_data") + .values({ hash, version, data }) + .onConflict((oc) => oc.column("hash").doNothing()) + .execute(); + await transaction + .replaceInto("player_chunk") + .values({ + world: dimension, + chunk_x: chunkX, + chunk_z: chunkZ, + uuid, + ts: timestamp, + hash, + }) + .execute(); + }); + } + + /** + * Gets all the [latest] chunks within a region. 
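+     *
+     * A region here is the 32x32-chunk grouping produced by the generated
+     * gen_region_x/gen_region_z columns (floor(chunk coord / 32), see
+     * Migration_0002_GenerateRegionCoordColumns).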
+     */
+    public async getRegionChunks(
+        dimension: string,
+        regionX: number,
+        regionZ: number,
+    ) {
+        return await this.internal
+            .selectFrom("player_chunk")
+            .innerJoin("chunk_data", "chunk_data.hash", "player_chunk.hash")
+            .select([
+                "player_chunk.chunk_x as chunkX",
+                "player_chunk.chunk_z as chunkZ",
+                (eb) => eb.fn.max("player_chunk.ts").as("timestamp"),
+                "chunk_data.version as version",
+                "chunk_data.data as data",
+            ])
+            .where("player_chunk.world", "=", dimension)
+            .where("player_chunk.gen_region_x", "=", regionX)
+            .where("player_chunk.gen_region_z", "=", regionZ)
+            .groupBy(["chunkX", "chunkZ", "version", "data"])
+            .orderBy("timestamp", "desc")
+            .execute();
+    }
+}
diff --git a/server/src/db/migrations.ts b/server/src/db/migrations.ts
new file mode 100644
index 00000000..076d097b
--- /dev/null
+++ b/server/src/db/migrations.ts
@@ -0,0 +1,117 @@
+import { Kysely, sql, type Migration, type MigrationProvider } from "kysely";
+
+type MigrationRegistry = Record<string, Migration>;
+type MigrationClass = { name: string } & (new () => Migration);
+
+export default class Migrations implements MigrationProvider {
+    public async getMigrations(): Promise<MigrationRegistry> {
+        return this.generateMigrationRegistry([
+            Migration_0001_InitialSetup,
+            Migration_0002_GenerateRegionCoordColumns,
+        ]);
+    }
+
+    private generateMigrationRegistry(
+        migrations: Array<MigrationClass>,
+    ): MigrationRegistry {
+        const registry: MigrationRegistry = {};
+        for (const clazz of migrations) {
+            registry[clazz.name] = new clazz();
+        }
+        return registry;
+    }
+}
+
+// ============================================================
+// WARNING FOR WRITING MIGRATIONS!
+//
+// Kysely does not respect class functions: your "up" and "down" methods MUST
+// be fields, not class functions, otherwise your migration will fail!
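+//
+// (Class methods live on the prototype, while arrow-function fields are own
+// properties of the instance; that is presumably why only the field form is
+// picked up by the Migrator.)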
+// ============================================================
+
+export class Migration_0001_InitialSetup implements Migration {
+    public up = async (db: Kysely<any>) => {
+        await db.transaction().execute(async (transaction) => {
+            await transaction.schema
+                .createTable("chunk_data")
+                .ifNotExists()
+                .addColumn("hash", "blob", (col) => col.notNull().primaryKey())
+                .addColumn("version", "integer", (col) => col.notNull())
+                .addColumn("data", "blob", (col) => col.notNull())
+                .execute();
+            await transaction.schema
+                .createTable("player_chunk")
+                .ifNotExists()
+                .addColumn("world", "text", (col) => col.notNull())
+                .addColumn("chunk_x", "integer", (col) => col.notNull())
+                .addColumn("chunk_z", "integer", (col) => col.notNull())
+                .addColumn("uuid", "text", (col) => col.notNull())
+                .addColumn("ts", "bigint", (col) => col.notNull())
+                .addColumn("hash", "blob", (col) => col.notNull())
+                .addPrimaryKeyConstraint("PK_coords_and_player", [
+                    "world",
+                    "chunk_x",
+                    "chunk_z",
+                    "uuid",
+                ])
+                .addForeignKeyConstraint(
+                    "FK_chunk_ref",
+                    ["hash"],
+                    "chunk_data",
+                    ["hash"],
+                    (fk) => fk.onUpdate("no action").onDelete("no action"),
+                )
+                .execute();
+        });
+    };
+    // Probably shouldn't define a "down" since that just means an empty db
+}
+
+export class Migration_0002_GenerateRegionCoordColumns implements Migration {
+    public up = async (db: Kysely<any>) => {
+        await db.transaction().execute(async (transaction) => {
+            await transaction.schema
+                .alterTable("player_chunk")
+                .addColumn("gen_region_x", "integer", (col) => {
+                    return col
+                        .generatedAlwaysAs(sql`floor(chunk_x / 32.0)`)
+                        .notNull();
+                })
+                .execute();
+            await transaction.schema
+                .alterTable("player_chunk")
+                .addColumn("gen_region_z", "integer", (col) => {
+                    return col
+                        .generatedAlwaysAs(sql`floor(chunk_z / 32.0)`)
+                        .notNull();
+                })
+                .execute();
+            await transaction.schema
+                .alterTable("player_chunk")
+                .addColumn("gen_region_coord", "text", (col) => {
+                    return col
+                        .generatedAlwaysAs(
+                            sql`gen_region_x || '_' || gen_region_z`,
+                        )
+                        .notNull();
+                })
+                .execute();
+        });
+    };
+    public down = async (db: Kysely<any>) => {
+        await db.transaction().execute(async (transaction) => {
+            await transaction.schema
+                .alterTable("player_chunk")
+                .dropColumn("gen_region_coord")
+                .execute();
+            await transaction.schema
+                .alterTable("player_chunk")
+                .dropColumn("gen_region_x")
+                .execute();
+            await transaction.schema
+                .alterTable("player_chunk")
+                .dropColumn("gen_region_z")
+                .execute();
+        });
+    };
+}
diff --git a/server/src/deps/errors.ts b/server/src/deps/errors.ts
deleted file mode 100644
index 129a0314..00000000
--- a/server/src/deps/errors.ts
+++ /dev/null
@@ -1,49 +0,0 @@
-import node_os from "node:os";
-import node_utils from "node:util";
-
-export enum ErrorType {
-    FileExists,
-    FileNotFound,
-    UNKNOWN,
-}
-
-/**
- * Attempts to transform Node's less-than-helpful exceptions into something
- * more readable and logic-able.
- */
-export function getErrorType(error: any): ErrorType {
-    switch (Math.abs(error.errno ?? Infinity)) {
-        case node_os.constants.errno.ENOENT:
-            return ErrorType.FileNotFound;
-        case node_os.constants.errno.EEXIST:
-            return ErrorType.FileExists;
-        default:
-            return ErrorType.UNKNOWN;
-    }
-}
-
-/**
- * Utility that guarantees that the error is an instance of Error.
- */ -export function ensureError(error: any): Error { - if (error instanceof Error) { - return error; - } - switch (typeof error) { - case "string": - return new Error(error); - case "number": - case "bigint": - return new Error(String(error)); - } - return new Error(node_utils.inspect(error)); -} - -/** - * This is useful in cases where you need to throw but can't because of - * Javascript. Read more for context: - * https://www.proposals.es/proposals/throw%20expressions - */ -export function inlineThrow(error: any): T { - throw error; -} diff --git a/server/src/deps/json.ts b/server/src/deps/json.ts deleted file mode 100644 index 21eda8f6..00000000 --- a/server/src/deps/json.ts +++ /dev/null @@ -1,16 +0,0 @@ -export type JSONObject = { [key: string]: JSONValue | undefined }; -export type JSONArray = JSONValue[]; -export type JSONValue = - | JSONObject - | JSONArray - | string - | number - | boolean - | null; - -/** - * Wrapper function for JSON.parse() that provides a proper return type. - */ -export function parse(raw: string): JSONValue { - return JSON.parse(raw); -} diff --git a/server/src/lang.ts b/server/src/lang.ts new file mode 100644 index 00000000..70759493 --- /dev/null +++ b/server/src/lang.ts @@ -0,0 +1,73 @@ +import node_os from "node:os"; +import node_utils from "node:util"; + +export const INT8_SIZE = 1; +export const INT16_SIZE = 2; +export const INT32_SIZE = 4; +export const INT64_SIZE = 8; + +export type JSONObject = { [key: string]: JSONValue | undefined }; +export type JSONArray = JSONValue[]; +export type JSONValue = + | JSONObject + | JSONArray + | string + | number + | boolean + | null; + +/** + * Wrapper function for JSON.parse() that provides a proper return type. + */ +export function parseJson(raw: string): JSONValue { + return JSON.parse(raw); +} + +export namespace Errors { + export enum ErrorType { + FileExists, + FileNotFound, + UNKNOWN, + } + + /** + * Attempts to transform Node's less-than-helpful exceptions into something + * more readable and logic-able. + */ + export function getErrorType(error: any): ErrorType { + switch (Math.abs(error.errno ?? Infinity)) { + case node_os.constants.errno.ENOENT: + return ErrorType.FileNotFound; + case node_os.constants.errno.EEXIST: + return ErrorType.FileExists; + default: + return ErrorType.UNKNOWN; + } + } + + /** + * Utility that guarantees that the error is an instance of Error. + */ + export function ensureError(error: any): Error { + if (error instanceof Error) { + return error; + } + switch (typeof error) { + case "string": + return new Error(error); + case "number": + case "bigint": + return new Error(String(error)); + } + return new Error(node_utils.inspect(error)); + } + + /** + * This is useful in cases where you need to throw but can't because of + * Javascript. 
Read more for context: + * https://www.proposals.es/proposals/throw%20expressions + */ + export function inlineThrow(error: any): T { + throw error; + } +} diff --git a/server/src/main.ts b/server/src/main.ts index c082e3c2..3244871e 100644 --- a/server/src/main.ts +++ b/server/src/main.ts @@ -1,14 +1,31 @@ -import "./cli"; -import * as database from "./database"; -import * as metadata from "./metadata"; -import { ClientPacket } from "./protocol"; -import { CatchupRequestPacket } from "./protocol/CatchupRequestPacket"; -import { ChunkTilePacket } from "./protocol/ChunkTilePacket"; -import { TcpClient, TcpServer } from "./server"; -import { RegionCatchupPacket } from "./protocol/RegionCatchupPacket"; +import "./cli.ts"; +import DatabaseConnection from "./db/database.ts"; +import * as metadata from "./metadata.ts"; +import { + type ClientPacket, + encodePacketToBytes, + UnexpectedPacket, +} from "./net/protocol.ts"; +import { type ProtocolHandler, TcpClient, TcpServer } from "./net/server.ts"; +import { + ChunkTilePacket, + ClientboundChunkTimestampsResponsePacket, + ClientboundRegionTimestampsPacket, + ServerboundCatchupRequestPacket, + ServerboundChunkTimestampsRequestPacket, +} from "./net/packets.ts"; +import { isAuthed, OnlineAuth, requireAuth } from "./net/auth.ts"; +import { DATA_FOLDER } from "./metadata.ts"; let config: metadata.Config = null!; Promise.resolve().then(async () => { + const database = new DatabaseConnection( + Bun.env["SQLITE_PATH"] ?? `${DATA_FOLDER}/db.sqlite`, + { + create: true, + readwrite: true, + }, + ); await database.setup(); config = metadata.getConfig(); @@ -18,142 +35,179 @@ Promise.resolve().then(async () => { await metadata.loadWhitelist(); await metadata.loadUuidCache(); - new Main(); -}); - -type ProtocolClient = TcpClient; // TODO cleanup - -export class Main { - server = new TcpServer(this); - - //Cannot be async, as it's caled from a synchronous constructor - handleClientConnected(client: ProtocolClient) {} + const server = new TcpServer( + config.host, + config.port, + new (class implements ProtocolHandler { + public async handleClientConnected(client: TcpClient) {} + + public async handleClientDisconnected(client: TcpClient) {} + + public async handleClientAuthenticated(client: TcpClient) { + if (client.auth instanceof OnlineAuth) { + metadata.cachePlayerUuid( + client.auth.name, + client.auth.uuid, + ); + await metadata.saveUuidCache(); + + if ( + config.whitelist && + !metadata.whitelist.has(client.auth.uuid) + ) { + client.kick( + `Not whitelisted! 
[${Bun.inspect(client.auth)}]`, + ); + return; + } + } + + // TODO check version, mc server, user access + + await client.send( + new ClientboundRegionTimestampsPacket( + client.dimension!, + await database.getRegionTimestamps(client.dimension!), + ), + ); + } - async handleClientAuthenticated(client: ProtocolClient) { - if (!client.uuid) throw new Error("Client not authenticated"); + public async handleClientPacketReceived( + client: TcpClient, + packet: ClientPacket, + ) { + switch (packet.type) { + case ChunkTilePacket.TYPE: + await this.handleChunkTilePacket( + client, + packet as ChunkTilePacket, + ); + return; + case ServerboundCatchupRequestPacket.TYPE: + await this.handleCatchupRequest( + client, + packet as ServerboundCatchupRequestPacket, + ); + return; + case ServerboundChunkTimestampsRequestPacket.TYPE: + await this.handleRegionCatchupPacket( + client, + packet as ServerboundChunkTimestampsRequestPacket, + ); + return; + default: + throw new UnexpectedPacket(packet.type.toString()); + } + } - metadata.cachePlayerUuid(client.mcName!, client.uuid!); - await metadata.saveUuidCache(); + private async handleRegionCatchupPacket( + client: TcpClient, + packet: ServerboundChunkTimestampsRequestPacket, + ) { + requireAuth(client); + + if (packet.regions.length < 1) { + client.warn( + "Client requested chunk-timestamps without specifying any regions.", + ); + return; + } + + const chunks = await database.getChunkTimestamps( + packet.dimension, + packet.regions.map((region) => ({ + x: region.regionX, + z: region.regionZ, + })), + ); - if (config.whitelist) { - if (!metadata.whitelist.has(client.uuid)) { - client.log( - `Rejected unwhitelisted user ${client.mcName} (${client.uuid})`, + if (chunks.length < 1) { + client.warn( + `Client's request chunk-timestamps for [${packet.regions.length}] regions has no results.`, + ); + return; + } + + await client.send( + new ClientboundChunkTimestampsResponsePacket( + packet.dimension, + chunks, + ), ); - client.kick(`Not whitelisted`); - return; } - } - - // TODO check version, mc server, user access - - const timestamps = await database.getRegionTimestamps(client.world!); - client.send({ - type: "RegionTimestamps", - world: client.world!, - regions: timestamps, - }); - } - - handleClientDisconnected(client: ProtocolClient) {} - - handleClientPacketReceived(client: ProtocolClient, pkt: ClientPacket) { - client.debug(client.mcName + " <- " + pkt.type); - switch (pkt.type) { - case "ChunkTile": - return this.handleChunkTilePacket(client, pkt); - case "CatchupRequest": - return this.handleCatchupRequest(client, pkt); - case "RegionCatchup": - return this.handleRegionCatchupPacket(client, pkt); - default: - throw new Error( - `Unknown packet '${(pkt as any).type}' from client ${ - client.id - }`, - ); - } - } - - async handleChunkTilePacket(client: ProtocolClient, pkt: ChunkTilePacket) { - if (!client.uuid) - throw new Error(`${client.name} is not authenticated`); - - // TODO ignore if same chunk hash exists in db - - await database - .storeChunkData( - pkt.world, - pkt.chunk_x, - pkt.chunk_z, - client.uuid, - pkt.ts, - pkt.data.version, - pkt.data.hash, - pkt.data.data, - ) - .catch(console.error); - - // TODO small timeout, then skip if other client already has it - for (const otherClient of Object.values(this.server.clients)) { - if (client === otherClient) continue; - otherClient.send(pkt); - } - - // TODO queue tile render for web map - } - - async handleCatchupRequest( - client: ProtocolClient, - pkt: CatchupRequestPacket, - ) { - if 
(!client.uuid) - throw new Error(`${client.name} is not authenticated`); - - for (const req of pkt.chunks) { - let chunk = await database.getChunkData( - pkt.world, - req.chunkX, - req.chunkZ, - ); - if (!chunk) { - console.error(`${client.name} requested unavailable chunk`, { - world: pkt.world, - ...req, - }); - continue; + + private async handleCatchupRequest( + client: TcpClient, + packet: ServerboundCatchupRequestPacket, + ) { + requireAuth(client); + + for (const req of packet.chunks) { + let chunk = await database.getChunkData( + packet.dimension, + req.chunkX, + req.chunkZ, + ); + if (!chunk) { + console.error( + `${client.name} requested unavailable chunk`, + { + world: packet.dimension, + ...req, + }, + ); + continue; + } + + if (chunk.ts > req.timestamp) continue; // someone sent a new chunk, which presumably got relayed to the client + if (chunk.ts < req.timestamp) continue; // the client already has a chunk newer than this + + await client.send( + new ChunkTilePacket( + packet.dimension, + req.chunkX, + req.chunkZ, + req.timestamp, + chunk.version, + chunk.hash, + chunk.data, + ), + ); + } } - if (chunk.ts > req.timestamp) continue; // someone sent a new chunk, which presumably got relayed to the client - if (chunk.ts < req.timestamp) continue; // the client already has a chunk newer than this - - client.send({ - type: "ChunkTile", - world: pkt.world, - chunk_x: req.chunkX, - chunk_z: req.chunkX, - ts: req.timestamp, - data: { - hash: chunk.hash, - data: chunk.data, - version: chunk.version, - }, - }); - } - } - - async handleRegionCatchupPacket( - client: ProtocolClient, - pkt: RegionCatchupPacket, - ) { - if (!client.uuid) - throw new Error(`${client.name} is not authenticated`); - - const chunks = await database.getChunkTimestamps( - pkt.world, - pkt.regions, - ); - if (chunks.length) - client.send({ type: "Catchup", world: pkt.world, chunks }); - } -} + private async handleChunkTilePacket( + client: TcpClient, + packet: ChunkTilePacket, + ) { + requireAuth(client); + + if (client.auth instanceof OnlineAuth) { + await database + .storeChunkData( + packet.dimension, + packet.chunkX, + packet.chunkZ, + client.auth.uuid, + packet.timestamp, + packet.version, + packet.hash, + packet.data, + ) + .catch(client.warn); + } + + // TODO small timeout, then skip if other client already has it + const packetRaw = encodePacketToBytes(packet); + await Promise.allSettled( + server.clients + .values() + .filter((other) => other !== client && isAuthed(other)) + .map((other) => other.sendRaw(packet.type, packetRaw)), + ); + + // TODO queue tile render for web map + } + })(), + ); +}); diff --git a/server/src/metadata.ts b/server/src/metadata.ts index f2519833..f0d40dbf 100644 --- a/server/src/metadata.ts +++ b/server/src/metadata.ts @@ -1,17 +1,15 @@ import node_fs from "node:fs"; import node_path from "node:path"; import { Mutex } from "async-mutex"; -import * as errors from "./deps/errors"; -import * as json from "./deps/json"; -import * as z from "zod"; -import { fromZodError } from "zod-validation-error"; +import z, { prettifyError } from "zod/v4"; +import { Errors, type JSONValue, parseJson } from "./lang.ts"; export const DATA_FOLDER = process.env["MAPSYNC_DATA_DIR"] ?? 
"./mapsync"; try { node_fs.mkdirSync(DATA_FOLDER, { recursive: true }); console.log(`Created data folder "${DATA_FOLDER}"`); } catch (e: any) { - if (errors.getErrorType(e) !== errors.ErrorType.FileExists) throw e; + if (Errors.getErrorType(e) !== Errors.ErrorType.FileExists) throw e; console.log(`Using data folder "${DATA_FOLDER}"`); } @@ -25,7 +23,7 @@ try { */ function parseConfigFile( file: string, - parser: (raw: json.JSONValue) => T, + parser: (raw: JSONValue) => T, defaultSupplier: () => any, ): T { file = node_path.resolve(DATA_FOLDER, file); @@ -33,7 +31,7 @@ function parseConfigFile( try { fileContents = node_fs.readFileSync(file, "utf8"); } catch (e) { - if (errors.getErrorType(e) !== errors.ErrorType.FileNotFound) { + if (Errors.getErrorType(e) !== Errors.ErrorType.FileNotFound) { throw e; } // Could not find the config file, so attempt to create a default one @@ -46,10 +44,10 @@ function parseConfigFile( return defaultContent; } try { - return parser(json.parse(fileContents)); + return parser(parseJson(fileContents)); } catch (e) { if (e instanceof z.ZodError) { - throw "Could not parse " + file + ": " + fromZodError(e); + throw "Could not parse " + file + ": " + prettifyError(e); } throw e; } @@ -125,7 +123,7 @@ export async function saveWhitelist() { const UUID_CACHE_FILE = "uuid_cache.json"; const UUID_CACHE_MUTEX = new Mutex(); -const UUID_CACHE_SCHEMA = z.record(z.string().uuid()); +const UUID_CACHE_SCHEMA = z.record(z.string(), z.uuid()); // IGN UUID const uuid_cache = new Map(); diff --git a/server/src/net/auth.ts b/server/src/net/auth.ts new file mode 100644 index 00000000..f0632569 --- /dev/null +++ b/server/src/net/auth.ts @@ -0,0 +1,171 @@ +import node_crypto from "node:crypto"; + +import { z } from "zod/v4"; + +import { type TcpClient } from "./server.ts"; +import { + ClientboundAuthRequestPacket, + ClientboundWelcomePacket, + ServerboundAuthResponsePacket, + type ServerboundHandshakePacket, +} from "./packets.ts"; +import { UnexpectedPacket } from "./protocol.ts"; +import { SUPPORTED_VERSIONS, UUID_REGEX } from "../constants.ts"; +import { INT64_SIZE } from "../lang.ts"; + +// ============================================================ +// Handshake +// ============================================================ + +class AwaitingHandshake {} + +export async function handleConnected(client: TcpClient) { + client.auth = new AwaitingHandshake(); +} + +export async function handleHandshake( + client: TcpClient, + packet: ServerboundHandshakePacket, +) { + if (!(client.auth instanceof AwaitingHandshake)) { + throw new UnexpectedPacket(packet.type.toString()); + } + + if (!SUPPORTED_VERSIONS.has(packet.modVersion)) { + client.kick( + `Connected with unsupported version [${packet.modVersion}]`, + ); + return; + } + + client.gameAddress = packet.gameAddress; + client.dimension = packet.dimension; + + if (Bun.env["MAPSYNC_DISABLE_AUTH"] === "true") { + client.auth = new OfflineAuth(packet.mojangName); + client.name += "?:" + packet.mojangName; + await client.send(new ClientboundWelcomePacket()); + return; + } + + const serverSecret = node_crypto.randomBytes(INT64_SIZE); + client.auth = new AwaitingAuthResponse(serverSecret, packet.mojangName); + await client.send(new ClientboundAuthRequestPacket(serverSecret)); +} + +// ============================================================ +// Encryption Response +// ============================================================ + +class AwaitingAuthResponse { + public constructor( + public readonly serverSecret: Buffer, + public 
readonly claimedMojangUsername: string, + ) {} +} + +export async function handleAuthResponse( + client: TcpClient, + packet: ServerboundAuthResponsePacket, +) { + if (!(client.auth instanceof AwaitingAuthResponse)) { + throw new UnexpectedPacket(packet.type.toString()); + } + + const auth = await fetchHasJoined( + client, + client.auth.claimedMojangUsername, + node_crypto + .createHash("sha1") + .update(packet.clientSecret) + .update(client.auth.serverSecret) + .digest() + .toString("hex"), + ); + if (auth === null) { + client.kick("Not authenticated!"); + return; + } + + client.auth = new OnlineAuth(auth.name, auth.uuid); + client.name += ":" + auth.name; + await client.send(new ClientboundWelcomePacket()); + + await client.handlers.handleClientAuthenticated(client); +} + +// ============================================================ +// Authentication +// ============================================================ + +export class OfflineAuth { + public constructor(public readonly name: string) {} +} + +export class OnlineAuth { + public constructor( + public readonly name: string, + public readonly uuid: string, + ) {} +} + +export function isAuthed(client: TcpClient) { + return ( + client.auth instanceof OnlineAuth || client.auth instanceof OfflineAuth + ); +} + +export function requireAuth(client: TcpClient) { + if (!isAuthed(client)) { + throw new Error("User not authenticated!"); + } +} + +const MOJANG_AUTH_RESPONSE_SCHEMA = z.object({ + id: z.string().regex(UUID_REGEX), + name: z.string(), +}); + +async function fetchHasJoined( + client: TcpClient, + username: string, + shaHex: string, +): Promise<{ + name: string; + uuid: string; +} | null> { + let url = `https://sessionserver.mojang.com/session/minecraft/hasJoined?username=${username}&serverId=${shaHex}`; + + let response: Response; + try { + response = await fetch(url); + } catch (error) { + client.warn("Could not complete auth request!", error); + return null; + } + if (response.status === 204) { + return null; + } + + let raw: unknown; + try { + raw = await response.json(); + } catch (error) { + client.warn("Could not parse auth response as json!", error); + return null; + } + + let auth: z.infer; + try { + auth = MOJANG_AUTH_RESPONSE_SCHEMA.parse(raw); + } catch (error) { + client.warn("Could not validate auth response!"); + client.warn(z.prettifyError(error as z.ZodError)); + return null; + } + + return { + name: auth.name, + uuid: auth.id.replace(UUID_REGEX, "$1-$2-$3-$4-$5"), + }; +} diff --git a/server/src/net/buffers.ts b/server/src/net/buffers.ts new file mode 100644 index 00000000..6cc39e26 --- /dev/null +++ b/server/src/net/buffers.ts @@ -0,0 +1,173 @@ +import { ArrayBufferSink } from "bun"; + +export class BufferWriter { + private readonly sink: ArrayBufferSink; + private readonly view = new DataView(new ArrayBuffer(8)); // 64 bits + + public constructor() { + this.sink = new ArrayBufferSink(); + this.sink.start({ + asUint8Array: true, + stream: true, + }); + } + + public getBuffer(): Buffer { + return Buffer.from(this.sink.flush() as Uint8Array); + } + + public writeUnt8(val: number) { + this.view.setUint8(0, val); + this.sink.write(this.view.buffer.slice(0, 1)); + } + + public writeInt8(val: number) { + this.view.setInt8(0, val); + this.sink.write(this.view.buffer.slice(0, 1)); + } + + public writeUnt16(val: number) { + this.view.setUint16(0, val); + this.sink.write(this.view.buffer.slice(0, 2)); + } + + public writeInt16(val: number) { + this.view.setInt16(0, val); + this.sink.write(this.view.buffer.slice(0, 
2)); + } + + public writeUnt32(val: number) { + this.view.setUint32(0, val); + this.sink.write(this.view.buffer.slice(0, 4)); + } + + public writeInt32(val: number) { + this.view.setInt32(0, val); + this.sink.write(this.view.buffer.slice(0, 4)); + } + + public writeUnt64(val: number | bigint) { + if (typeof val === "number") { + val = BigInt(val); + } + this.view.setBigUint64(0, val); + this.sink.write(this.view.buffer); + } + + public writeInt64(val: number | bigint) { + if (typeof val === "number") { + val = BigInt(val); + } + this.view.setBigInt64(0, val); + this.sink.write(this.view.buffer); + } + + /** length-prefixed (u32), UTF-8 encoded */ + readonly #stringEncoder = new TextEncoder(); + public writeString(str: string) { + const bytes = this.#stringEncoder.encode(str); + this.writeUnt32(bytes.byteLength); + this.sink.write(bytes); + } + + /** length-prefixed (u32), UTF-8 encoded */ + public writeBufWithLen(buf: Buffer) { + this.writeUnt32(buf.byteLength); + this.writeBufRaw(buf); + } + + public writeBufRaw(buf: Buffer) { + this.sink.write(buf); + } +} + +/** Each read advances the internal offset into the buffer. */ +export class BufferReader { + private offset = 0; + + public constructor(private readonly buffer: Buffer) {} + + public get remainder(): number { + return this.buffer.length - this.offset; + } + + public readUnt8(): number { + const val = this.buffer.readUInt8(this.offset); + this.offset += 1; + return val; + } + + public readInt8(): number { + const val = this.buffer.readInt8(this.offset); + this.offset += 1; + return val; + } + + public readUnt16(): number { + const val = this.buffer.readUInt16BE(this.offset); + this.offset += 2; + return val; + } + + public readInt16(): number { + const val = this.buffer.readInt16BE(this.offset); + this.offset += 2; + return val; + } + + public readUnt32(): number { + const val = this.buffer.readUInt32BE(this.offset); + this.offset += 4; + return val; + } + + public readInt32(): number { + const val = this.buffer.readInt32BE(this.offset); + this.offset += 4; + return val; + } + + public readUnt64(): number { + const val = this.buffer.readBigUInt64BE(this.offset); + if (val > Number.MAX_SAFE_INTEGER) { + throw new Error(`64-bit number too big: ${val}`); + } + this.offset += 8; + return Number(val); + } + + public readInt64(): number { + const val = this.buffer.readBigInt64BE(this.offset); + if (val > Number.MAX_SAFE_INTEGER) { + throw new Error(`64-bit number too big: ${val}`); + } + if (val < Number.MIN_SAFE_INTEGER) { + throw new Error(`64-bit number too small: ${val}`); + } + this.offset += 8; + return Number(val); + } + + readonly #stringDecoder = new TextDecoder("utf-8"); + /** length-prefixed (u32), UTF-8 encoded */ + public readString(): string { + return this.#stringDecoder.decode(this.readBufWithLen()); + } + + public readBufWithLen(): Buffer { + return this.readBufLen(this.readUnt32()); + } + + public readBufLen(length: number): Buffer { + // simply returning a slice() would retain the entire buf in memory + const buffer = Buffer.allocUnsafe(length); + this.buffer.copy(buffer, 0, this.offset, this.offset + length); + this.offset += length; + return buffer; + } + + /** any reads after this will fail */ + public readRemainder(): Buffer { + return this.readBufLen(this.remainder); + } +} diff --git a/server/src/net/packets.ts b/server/src/net/packets.ts new file mode 100644 index 00000000..842d97f9 --- /dev/null +++ b/server/src/net/packets.ts @@ -0,0 +1,214 @@ +import { BufferWriter, BufferReader } from "./buffers.ts"; +import 
{ SHA1_HASH_LENGTH } from "../constants.ts"; + +interface Packet { + type: Symbol; +} + +function readArray(length: number, parser: () => T): Array { + const array: T[] = new Array(length); + for (let i = 0; i < length; i++) { + array[i] = parser(); + } + return array; +} + +export class ServerboundHandshakePacket implements Packet { + public static readonly TYPE = Symbol("ServerboundHandshakePacket"); + + public readonly type = ServerboundHandshakePacket.TYPE; + + public constructor( + public readonly modVersion: string, + public readonly mojangName: string, + public readonly gameAddress: string, + public readonly dimension: string, + ) {} + + public static decode(reader: BufferReader): ServerboundHandshakePacket { + return new ServerboundHandshakePacket( + reader.readString(), + reader.readString(), + reader.readString(), + reader.readString(), + ); + } +} + +export class ClientboundAuthRequestPacket implements Packet { + public static readonly TYPE = Symbol("ClientboundAuthRequestPacket"); + + public readonly type = ClientboundAuthRequestPacket.TYPE; + + public constructor(public readonly serverSecret: Buffer) {} + + public encode(writer: BufferWriter) { + writer.writeBufWithLen(this.serverSecret); + } +} + +export class ServerboundAuthResponsePacket implements Packet { + public static readonly TYPE = Symbol("ServerboundAuthResponsePacket"); + + public readonly type = ServerboundAuthResponsePacket.TYPE; + + public constructor(public readonly clientSecret: Buffer) {} + + public static decode(reader: BufferReader): ServerboundAuthResponsePacket { + return new ServerboundAuthResponsePacket(reader.readBufWithLen()); + } +} + +export class ClientboundWelcomePacket implements Packet { + public static readonly TYPE = Symbol("Welcome"); + + public readonly type = ClientboundWelcomePacket.TYPE; + + public encode(writer: BufferWriter) {} +} + +export class ClientboundRegionTimestampsPacket implements Packet { + public static readonly TYPE = Symbol("ClientboundRegionTimestampsPacket"); + + public readonly type = ClientboundRegionTimestampsPacket.TYPE; + + public constructor( + public readonly dimension: string, + public readonly regions: Array<{ + readonly regionX: number; + readonly regionZ: number; + readonly timestamp: number; + }>, + ) {} + + public encode(writer: BufferWriter) { + writer.writeString(this.dimension); + writer.writeInt16(this.regions.length); + for (const region of this.regions) { + writer.writeInt16(region.regionX); + writer.writeInt16(region.regionZ); + writer.writeInt64(region.timestamp); + } + } +} + +export class ServerboundChunkTimestampsRequestPacket implements Packet { + public static readonly TYPE = Symbol( + "ServerboundChunkTimestampsRequestPacket", + ); + + public readonly type = ServerboundChunkTimestampsRequestPacket.TYPE; + + public constructor( + public readonly dimension: string, + public readonly regions: Array<{ + readonly regionX: number; + readonly regionZ: number; + }>, + ) {} + + public static decode( + reader: BufferReader, + ): ServerboundChunkTimestampsRequestPacket { + return new ServerboundChunkTimestampsRequestPacket( + reader.readString(), + readArray(reader.readInt16(), () => ({ + regionX: reader.readInt16(), + regionZ: reader.readInt16(), + })), + ); + } +} + +export class ClientboundChunkTimestampsResponsePacket implements Packet { + public static readonly TYPE = Symbol( + "ClientboundChunkTimestampsResponsePacket", + ); + + public readonly type = ClientboundChunkTimestampsResponsePacket.TYPE; + + public constructor( + public readonly dimension: 
string, + public readonly chunks: Array<{ + readonly chunkX: number; + readonly chunkZ: number; + readonly timestamp: number; + }>, + ) {} + + public encode(writer: BufferWriter) { + writer.writeString(this.dimension); + writer.writeUnt32(this.chunks.length); + for (const chunk of this.chunks) { + writer.writeInt32(chunk.chunkX); + writer.writeInt32(chunk.chunkZ); + writer.writeUnt64(chunk.timestamp); + } + } +} + +export class ServerboundCatchupRequestPacket implements Packet { + public static readonly TYPE = Symbol("ServerboundCatchupRequestPacket"); + + public readonly type = ServerboundCatchupRequestPacket.TYPE; + + public constructor( + public readonly dimension: string, + public readonly chunks: Array<{ + readonly chunkX: number; + readonly chunkZ: number; + readonly timestamp: number; + }>, + ) {} + + public static decode( + reader: BufferReader, + ): ServerboundCatchupRequestPacket { + return new ServerboundCatchupRequestPacket( + reader.readString(), + readArray(reader.readUnt32(), () => ({ + chunkX: reader.readInt32(), + chunkZ: reader.readInt32(), + timestamp: reader.readUnt64(), + })), + ); + } +} + +export class ChunkTilePacket implements Packet { + public static readonly TYPE = Symbol("ChunkTilePacket"); + + public readonly type = ChunkTilePacket.TYPE; + + public constructor( + public readonly dimension: string, + public readonly chunkX: number, + public readonly chunkZ: number, + public readonly timestamp: number, + public readonly version: number, + public readonly hash: Buffer, + public readonly data: Buffer, + ) {} + + public encode(writer: BufferWriter) { + writer.writeString(this.dimension); + writer.writeInt32(this.chunkX); + writer.writeInt32(this.chunkZ); + writer.writeUnt64(this.timestamp); + writer.writeUnt16(this.version); + writer.writeBufRaw(this.hash); + writer.writeBufRaw(this.data); // XXX do we need to prefix with length? 
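+        // (As written, probably not: decode() below reads the SHA1-length hash
+        // and then readRemainder() for data, so the WebSocket frame length
+        // already bounds the data, as long as data stays the final field.)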
+ } + + public static decode(reader: BufferReader): ChunkTilePacket { + return new ChunkTilePacket( + reader.readString(), + reader.readInt32(), + reader.readInt32(), + reader.readUnt64(), + reader.readUnt16(), + reader.readBufLen(SHA1_HASH_LENGTH), + reader.readRemainder(), + ); + } +} diff --git a/server/src/net/protocol.ts b/server/src/net/protocol.ts new file mode 100644 index 00000000..9de378ee --- /dev/null +++ b/server/src/net/protocol.ts @@ -0,0 +1,95 @@ +import { BufferWriter, BufferReader } from "./buffers.ts"; +import { + ChunkTilePacket, + ClientboundAuthRequestPacket, + ClientboundRegionTimestampsPacket, + ServerboundChunkTimestampsRequestPacket, + ServerboundAuthResponsePacket, + ServerboundHandshakePacket, + ClientboundChunkTimestampsResponsePacket, + ServerboundCatchupRequestPacket, + ClientboundWelcomePacket, +} from "./packets.ts"; + +export type ClientPacket = + | ChunkTilePacket + | ServerboundAuthResponsePacket + | ServerboundHandshakePacket + | ServerboundCatchupRequestPacket + | ServerboundChunkTimestampsRequestPacket; + +export type ServerPacket = + | ChunkTilePacket + | ClientboundAuthRequestPacket + | ClientboundChunkTimestampsResponsePacket + | ClientboundRegionTimestampsPacket + | ClientboundWelcomePacket; + +export const packetIds = [ + "ERROR:pkt0", + ServerboundHandshakePacket.TYPE, + ClientboundAuthRequestPacket.TYPE, + ServerboundAuthResponsePacket.TYPE, + ChunkTilePacket.TYPE, + ClientboundChunkTimestampsResponsePacket.TYPE, + ServerboundCatchupRequestPacket.TYPE, + ClientboundRegionTimestampsPacket.TYPE, + ServerboundChunkTimestampsRequestPacket.TYPE, + ClientboundWelcomePacket.TYPE, +]; + +export function getPacketId(type: ServerPacket["type"]) { + const id = packetIds.indexOf(type); + if (id <= 0) throw new Error(`Unknown packet type ${type.toString()}`); + return id; +} + +export function decodePacket(reader: BufferReader): ClientPacket { + const packetType = reader.readUnt8(); + switch (packetIds[packetType]) { + case ChunkTilePacket.TYPE: + return ChunkTilePacket.decode(reader); + case ServerboundHandshakePacket.TYPE: + return ServerboundHandshakePacket.decode(reader); + case ServerboundAuthResponsePacket.TYPE: + return ServerboundAuthResponsePacket.decode(reader); + case ServerboundCatchupRequestPacket.TYPE: + return ServerboundCatchupRequestPacket.decode(reader); + case ServerboundChunkTimestampsRequestPacket.TYPE: + return ServerboundChunkTimestampsRequestPacket.decode(reader); + default: + throw new Error(`Unknown packet type ${packetType}`); + } +} + +export function encodePacket(packet: ServerPacket, writer: BufferWriter): void { + writer.writeUnt8(getPacketId(packet.type)); + switch (packet.type) { + case ChunkTilePacket.TYPE: + return (packet as ChunkTilePacket).encode(writer); + case ClientboundChunkTimestampsResponsePacket.TYPE: + return (packet as ClientboundChunkTimestampsResponsePacket).encode( + writer, + ); + case ClientboundAuthRequestPacket.TYPE: + return (packet as ClientboundAuthRequestPacket).encode(writer); + case ClientboundWelcomePacket.TYPE: + return (packet as ClientboundWelcomePacket).encode(writer); + case ClientboundRegionTimestampsPacket.TYPE: + return (packet as ClientboundRegionTimestampsPacket).encode(writer); + default: + throw new Error(`Unknown packet type ${(packet as any).type}`); + } +} + +export function encodePacketToBytes(packet: ServerPacket): Buffer { + const writer = new BufferWriter(); + encodePacket(packet, writer); + return writer.getBuffer(); +} + +export class UnexpectedPacket extends Error { + public 
constructor(message?: string) { + super(message); + } +} diff --git a/server/src/net/server.ts b/server/src/net/server.ts new file mode 100644 index 00000000..c834ea64 --- /dev/null +++ b/server/src/net/server.ts @@ -0,0 +1,195 @@ +import { serve, type Server, type ServerWebSocket } from "bun"; + +import { + type ClientPacket, + decodePacket, + encodePacketToBytes, + type ServerPacket, + UnexpectedPacket, +} from "./protocol.ts"; +import { BufferReader } from "./buffers.ts"; +import { + ChunkTilePacket, + ServerboundCatchupRequestPacket, + ServerboundChunkTimestampsRequestPacket, + ServerboundAuthResponsePacket, + ServerboundHandshakePacket, +} from "./packets.ts"; +import { + handleConnected, + handleAuthResponse, + handleHandshake, +} from "./auth.ts"; +import { MAX_WS_FRAME_LENGTH } from "../constants.ts"; + +export interface ProtocolHandler { + handleClientConnected(client: TcpClient): Promise; + + handleClientDisconnected(client: TcpClient): Promise; + + handleClientAuthenticated(client: TcpClient): Promise; + + handleClientPacketReceived( + client: TcpClient, + packet: ClientPacket, + ): Promise; +} + +export class TcpServer { + public readonly server: Server; + public readonly clients = new Map(); + + public constructor( + host: string, + port: number, + public readonly handlers: ProtocolHandler, + ) { + const self = this; + + this.server = serve({ + hostname: host, + port: port, + async fetch(req, server) { + const url = URL.parse(req.url); + if (url === null) { + return new Response(null, { + status: 400, + }); + } + if (url.pathname !== "/") { + return new Response(null, { + status: 404, + }); + } + if (!server.upgrade(req)) { + return new Response(null, { + status: 426, + }); + } + // Bun automatically returns a 101 Switching Protocols + return undefined; + }, + websocket: { + maxPayloadLength: MAX_WS_FRAME_LENGTH, + // Allow 20 full frames of data of backpressure. Keep in mind + // that this is still >12x less than the default backpressure + // of 16MB. + backpressureLimit: MAX_WS_FRAME_LENGTH * 20, + closeOnBackpressureLimit: true, + idleTimeout: 60, // 60 seconds + + async open(socket) { + const client = new TcpClient(socket, self.handlers); + self.clients.set(client.id, (socket.data = client)); + await handleConnected(client); + await self.handlers.handleClientConnected(client); + client.log("Connected"); + }, + async close(socket, code, reason) { + const client: TcpClient = socket.data; + self.clients.delete(client.id); + await self.handlers.handleClientDisconnected(client); + client.log( + `Disconnected (Code: ${code}) (Reason: ${reason})`, + ); + }, + async message(socket, message) { + const client: TcpClient = socket.data; + if (typeof message === "string") { + socket.close(1003, "String messages are not supported"); + return; + } + try { + const reader = new BufferReader(message); + const packet = decodePacket(reader); + const remainder = reader.remainder; + if (remainder > 0) { + throw new Error( + `Packet did not consume all data! 
Remainder: [${remainder}]`, + ); + } + await client.handlePacketReceived(packet); + } catch (err) { + client.warn(err); + client.kick("Error in packet handler"); + return; + } + }, + }, + }); + console.log("[WsServer] Listening on", host, port); + } +} + +let nextClientId = 1; + +/** Prefixes packets with their length (UInt32BE); + * handles Mojang authentication */ +export class TcpClient { + public readonly id = nextClientId++; + /** contains mojang name once logged in */ + public name = "Client" + this.id; + + public gameAddress: string | null = null; + public dimension: string | null = null; + + /** sent by client during handshake */ + public auth: any; + + public constructor( + private socket: ServerWebSocket, + public handlers: ProtocolHandler, + ) {} + + async handlePacketReceived(packet: ClientPacket) { + this.debug("Received packet: " + packet.type.toString()); + switch (packet.type) { + case ServerboundHandshakePacket.TYPE: + await handleHandshake( + this, + packet as ServerboundHandshakePacket, + ); + return; + case ServerboundAuthResponsePacket.TYPE: + await handleAuthResponse( + this, + packet as ServerboundAuthResponsePacket, + ); + return; + case ServerboundChunkTimestampsRequestPacket.TYPE: + case ServerboundCatchupRequestPacket.TYPE: + case ChunkTilePacket.TYPE: + await this.handlers.handleClientPacketReceived(this, packet); + return; + default: + throw new UnexpectedPacket(packet.type.toString()); + } + } + + public kick(internalReason: string) { + this.log(`Kicking:`, internalReason); + this.socket.close(); + } + + public async send(packet: ServerPacket) { + await this.sendRaw(packet.type, encodePacketToBytes(packet)); + } + + public async sendRaw(type: Symbol, raw: Buffer) { + this.debug("Sending packet: " + type.toString()); + this.socket.sendBinary(raw); + } + + public debug(...args: any[]) { + if (process.env.NODE_ENV === "production") return; + console.debug(`[${this.name}]`, ...args); + } + + public log(...args: any[]) { + console.log(`[${this.name}]`, ...args); + } + + public warn(...args: any[]) { + console.error(`[${this.name}]`, ...args); + } +} diff --git a/server/src/protocol/BufReader.ts b/server/src/protocol/BufReader.ts deleted file mode 100644 index e4d39ef2..00000000 --- a/server/src/protocol/BufReader.ts +++ /dev/null @@ -1,100 +0,0 @@ -/** Each read advances the internal offset into the buffer. 
*/ -export class BufReader { - private off = 0; - private offStack: number[] = []; - - constructor(private buf: Buffer) {} - - saveOffset() { - this.offStack.push(this.off); - } - - restoreOffset() { - const off = this.offStack.pop(); - if (off === undefined) throw new Error("Offset stack is empty"); - this.off = off; - } - - readUInt8() { - const val = this.buf.readUInt8(this.off); - this.off += 1; - return val; - } - - readInt8() { - const val = this.buf.readInt8(this.off); - this.off += 1; - return val; - } - - readUInt16() { - const val = this.buf.readUInt16BE(this.off); - this.off += 2; - return val; - } - - readInt16() { - const val = this.buf.readInt16BE(this.off); - this.off += 2; - return val; - } - - readUInt32() { - const val = this.buf.readUInt32BE(this.off); - this.off += 4; - return val; - } - - readInt32() { - const val = this.buf.readInt32BE(this.off); - this.off += 4; - return val; - } - - readUInt64() { - const valBig = this.buf.readBigUInt64BE(this.off); - if (valBig > Number.MAX_SAFE_INTEGER) { - throw new Error(`64-bit number too big: ${valBig}`); - } - this.off += 8; - return Number(valBig); - } - - readInt64() { - const valBig = this.buf.readBigInt64BE(this.off); - if (valBig > Number.MAX_SAFE_INTEGER) { - throw new Error(`64-bit number too big: ${valBig}`); - } - if (valBig < Number.MIN_SAFE_INTEGER) { - throw new Error(`64-bit number too small: ${valBig}`); - } - this.off += 8; - return Number(valBig); - } - - /** length-prefixed (32 bits), UTF-8 encoded */ - readString() { - const len = this.readUInt32(); - const str = this.buf.toString("utf8", this.off, this.off + len); - this.off += len; - return str; - } - - readBufWithLen() { - const len = this.readUInt32(); - return this.readBufLen(len); - } - - readBufLen(length: number) { - // simply returning a slice() would retain the entire buf in memory - const buf = Buffer.allocUnsafe(length); - this.buf.copy(buf, 0, this.off, this.off + length); - this.off += length; - return buf; - } - - /** any reads after this will fail */ - readRemainder() { - return this.readBufLen(this.buf.length - this.off); - } -} diff --git a/server/src/protocol/BufWriter.ts b/server/src/protocol/BufWriter.ts deleted file mode 100644 index 0dba9ea9..00000000 --- a/server/src/protocol/BufWriter.ts +++ /dev/null @@ -1,100 +0,0 @@ -/** Each write advances the internal offset into the buffer. - * Grows the buffer to twice the current size if a write would exceed the buffer. */ -export class BufWriter { - private off = 0; - private buf: Buffer; - - constructor(initialSize?: number) { - this.buf = Buffer.alloc(initialSize || 1024); - } - - /** Returns a slice reference to the written bytes so far. 
*/ - getBuffer() { - return this.buf.slice(0, this.off); - } - - writeUInt8(val: number) { - this.ensureSpace(1); - this.buf.writeUInt8(val, this.off); - this.off += 1; - } - - writeInt8(val: number) { - this.ensureSpace(1); - this.buf.writeInt8(val, this.off); - this.off += 1; - } - - writeUInt16(val: number) { - this.ensureSpace(2); - this.buf.writeUInt16BE(val, this.off); - this.off += 2; - } - - writeInt16(val: number) { - this.ensureSpace(2); - this.buf.writeInt16BE(val, this.off); - this.off += 2; - } - - writeUInt32(val: number) { - this.ensureSpace(4); - this.buf.writeUInt32BE(val, this.off); - this.off += 4; - } - - writeInt32(val: number) { - this.ensureSpace(4); - this.buf.writeInt32BE(val, this.off); - this.off += 4; - } - - writeUInt64(val: number) { - this.ensureSpace(8); - this.buf.writeBigUInt64BE(BigInt(val), this.off); - this.off += 8; - } - - writeInt64(val: number) { - this.ensureSpace(8); - this.buf.writeBigInt64BE(BigInt(val), this.off); - this.off += 8; - } - - /** length-prefixed (32 bits), UTF-8 encoded */ - writeString(str: string) { - const strBuf = Buffer.from(str, "utf8"); - this.ensureSpace(4 + strBuf.length); - this.buf.writeUInt32BE(strBuf.length, this.off); - this.off += 4; - this.buf.set(strBuf, this.off); - this.off += strBuf.length; - } - - /** length-prefixed (32 bits), UTF-8 encoded */ - writeBufWithLen(buf: Buffer) { - this.ensureSpace(4 + buf.length); - this.buf.writeUInt32BE(buf.length, this.off); - this.off += 4; - this.buf.set(buf, this.off); - this.off += buf.length; - } - - writeBufRaw(buf: Buffer) { - this.ensureSpace(buf.length); - this.buf.set(buf, this.off); - this.off += buf.length; - } - - private ensureSpace(bytes: number) { - let len = this.buf.length; - while (len <= this.off + bytes) { - len = len * 2; - } - if (len !== this.buf.length) { - const newBuf = Buffer.alloc(len); - this.buf.copy(newBuf, 0, 0, this.off); - this.buf = newBuf; - } - } -} diff --git a/server/src/protocol/CatchupPacket.ts b/server/src/protocol/CatchupPacket.ts deleted file mode 100644 index d05f839b..00000000 --- a/server/src/protocol/CatchupPacket.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { type CatchupChunk } from "../model"; -import { BufWriter } from "./BufWriter"; - -export interface CatchupPacket { - type: "Catchup"; - world: string; - chunks: CatchupChunk[]; -} - -export namespace CatchupPacket { - export function encode(pkt: CatchupPacket, writer: BufWriter) { - if (pkt.chunks.length < 1) - throw new Error(`Catchup chunks must not be empty`); - writer.writeString(pkt.world); - writer.writeUInt32(pkt.chunks.length); - for (const row of pkt.chunks) { - writer.writeInt32(row.chunkX); - writer.writeInt32(row.chunkZ); - writer.writeUInt64(row.timestamp); - } - } -} diff --git a/server/src/protocol/CatchupRequestPacket.ts b/server/src/protocol/CatchupRequestPacket.ts deleted file mode 100644 index a14ddc86..00000000 --- a/server/src/protocol/CatchupRequestPacket.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { type CatchupChunk } from "../model"; -import { BufReader } from "./BufReader"; - -export interface CatchupRequestPacket { - type: "CatchupRequest"; - world: string; - chunks: CatchupChunk[]; -} - -export namespace CatchupRequestPacket { - export function decode(reader: BufReader): CatchupRequestPacket { - const world = reader.readString(); - const chunks: CatchupChunk[] = new Array(reader.readUInt32()); - for (let i = 0; i < chunks.length; i++) { - chunks[i] = { - chunkX: reader.readInt32(), - chunkZ: reader.readInt32(), - timestamp: reader.readUInt64(), - }; 
- } - return { type: "CatchupRequest", world, chunks }; - } -} diff --git a/server/src/protocol/ChunkTilePacket.ts b/server/src/protocol/ChunkTilePacket.ts deleted file mode 100644 index eee9f326..00000000 --- a/server/src/protocol/ChunkTilePacket.ts +++ /dev/null @@ -1,39 +0,0 @@ -import { BufReader } from "./BufReader"; -import { BufWriter } from "./BufWriter"; -import { SHA1_HASH_LENGTH } from "../constants"; - -export interface ChunkTilePacket { - type: "ChunkTile"; - world: string; - chunk_x: number; - chunk_z: number; - ts: number; - data: { version: number; hash: Buffer; data: Buffer }; -} - -export namespace ChunkTilePacket { - export function decode(reader: BufReader): ChunkTilePacket { - return { - type: "ChunkTile", - world: reader.readString(), - chunk_x: reader.readInt32(), - chunk_z: reader.readInt32(), - ts: reader.readUInt64(), - data: { - version: reader.readUInt16(), - hash: reader.readBufLen(SHA1_HASH_LENGTH), - data: reader.readRemainder(), - }, - }; - } - - export function encode(pkt: ChunkTilePacket, writer: BufWriter) { - writer.writeString(pkt.world); - writer.writeInt32(pkt.chunk_x); - writer.writeInt32(pkt.chunk_z); - writer.writeUInt64(pkt.ts); - writer.writeUInt16(pkt.data.version); - writer.writeBufRaw(pkt.data.hash); - writer.writeBufRaw(pkt.data.data); // XXX do we need to prefix with length? - } -} diff --git a/server/src/protocol/EncryptionRequestPacket.ts b/server/src/protocol/EncryptionRequestPacket.ts deleted file mode 100644 index 148e4212..00000000 --- a/server/src/protocol/EncryptionRequestPacket.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { BufReader } from "./BufReader"; -import { BufWriter } from "./BufWriter"; - -export interface EncryptionRequestPacket { - type: "EncryptionRequest"; - publicKey: Buffer; - verifyToken: Buffer; -} - -export namespace EncryptionRequestPacket { - export function decode(reader: BufReader): EncryptionRequestPacket { - return { - type: "EncryptionRequest", - publicKey: reader.readBufWithLen(), - verifyToken: reader.readBufWithLen(), - }; - } - - export function encode(pkt: EncryptionRequestPacket, writer: BufWriter) { - writer.writeBufWithLen(pkt.publicKey); - writer.writeBufWithLen(pkt.verifyToken); - } -} diff --git a/server/src/protocol/EncryptionResponsePacket.ts b/server/src/protocol/EncryptionResponsePacket.ts deleted file mode 100644 index e17adc5f..00000000 --- a/server/src/protocol/EncryptionResponsePacket.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { BufReader } from "./BufReader"; -import { BufWriter } from "./BufWriter"; - -export interface EncryptionResponsePacket { - type: "EncryptionResponse"; - /** encrypted with server's public key */ - sharedSecret: Buffer; - /** encrypted with server's public key */ - verifyToken: Buffer; -} - -export namespace EncryptionResponsePacket { - export function decode(reader: BufReader): EncryptionResponsePacket { - return { - type: "EncryptionResponse", - sharedSecret: reader.readBufWithLen(), - verifyToken: reader.readBufWithLen(), - }; - } - - export function encode(pkt: EncryptionResponsePacket, writer: BufWriter) { - writer.writeBufWithLen(pkt.sharedSecret); - writer.writeBufWithLen(pkt.verifyToken); - } -} diff --git a/server/src/protocol/HandshakePacket.ts b/server/src/protocol/HandshakePacket.ts deleted file mode 100644 index 32bd4b82..00000000 --- a/server/src/protocol/HandshakePacket.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { BufReader } from "./BufReader"; -import { BufWriter } from "./BufWriter"; - -export interface HandshakePacket { - type: "Handshake"; - 
modVersion: string; - mojangName: string; - gameAddress: string; - world: string; -} - -export namespace HandshakePacket { - export function decode(reader: BufReader): HandshakePacket { - return { - type: "Handshake", - modVersion: reader.readString(), - mojangName: reader.readString(), - gameAddress: reader.readString(), - world: reader.readString(), - }; - } -} diff --git a/server/src/protocol/RegionCatchupPacket.ts b/server/src/protocol/RegionCatchupPacket.ts deleted file mode 100644 index 13890d9b..00000000 --- a/server/src/protocol/RegionCatchupPacket.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { BufReader } from "./BufReader"; -import { type Pos2D } from "../model"; - -export interface RegionCatchupPacket { - type: "RegionCatchup"; - world: string; - regions: Pos2D[]; -} - -export namespace RegionCatchupPacket { - export function decode(reader: BufReader): RegionCatchupPacket { - let world = reader.readString(); - const len = reader.readInt16(); - const regions: Pos2D[] = []; - for (let i = 0; i < len; i++) { - regions.push({ - x: reader.readInt16(), - z: reader.readInt16(), - }); - } - return { type: "RegionCatchup", world, regions }; - } -} diff --git a/server/src/protocol/RegionTimestampsPacket.ts b/server/src/protocol/RegionTimestampsPacket.ts deleted file mode 100644 index e99a151c..00000000 --- a/server/src/protocol/RegionTimestampsPacket.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { BufWriter } from "./BufWriter"; -import { CatchupRegion } from "../model"; - -export interface RegionTimestampsPacket { - type: "RegionTimestamps"; - world: string; - regions: Array; -} - -export namespace RegionTimestampsPacket { - export function encode(pkt: RegionTimestampsPacket, writer: BufWriter) { - writer.writeString(pkt.world); - writer.writeInt16(pkt.regions.length); - console.log("Sending regions " + JSON.stringify(pkt.regions)); - for (let i = 0; i < pkt.regions.length; i++) { - let region = pkt.regions[i]; - writer.writeInt16(region.regionX); - writer.writeInt16(region.regionZ); - writer.writeInt64(region.timestamp); - } - } -} diff --git a/server/src/protocol/index.ts b/server/src/protocol/index.ts deleted file mode 100644 index da615fb8..00000000 --- a/server/src/protocol/index.ts +++ /dev/null @@ -1,75 +0,0 @@ -import { BufReader } from "./BufReader"; -import { BufWriter } from "./BufWriter"; -import { ChunkTilePacket } from "./ChunkTilePacket"; -import { EncryptionRequestPacket } from "./EncryptionRequestPacket"; -import { EncryptionResponsePacket } from "./EncryptionResponsePacket"; -import { HandshakePacket } from "./HandshakePacket"; -import { CatchupPacket } from "./CatchupPacket"; -import { CatchupRequestPacket } from "./CatchupRequestPacket"; -import { RegionTimestampsPacket } from "./RegionTimestampsPacket"; -import { RegionCatchupPacket } from "./RegionCatchupPacket"; - -export type ClientPacket = - | ChunkTilePacket - | EncryptionResponsePacket - | HandshakePacket - | CatchupRequestPacket - | RegionCatchupPacket; - -export type ServerPacket = - | ChunkTilePacket - | EncryptionRequestPacket - | CatchupPacket - | RegionTimestampsPacket; - -export const packetIds = [ - "ERROR:pkt0", - "Handshake", - "EncryptionRequest", - "EncryptionResponse", - "ChunkTile", - "Catchup", - "CatchupRequest", - "RegionTimestamps", - "RegionCatchup", -]; - -export function getPacketId(type: ServerPacket["type"]) { - const id = packetIds.indexOf(type); - if (id === -1) throw new Error(`Unknown packet type ${type}`); - return id; -} - -export function decodePacket(reader: BufReader): ClientPacket { - 
const packetType = reader.readUInt8(); - switch (packetIds[packetType]) { - case "ChunkTile": - return ChunkTilePacket.decode(reader); - case "Handshake": - return HandshakePacket.decode(reader); - case "EncryptionResponse": - return EncryptionResponsePacket.decode(reader); - case "CatchupRequest": - return CatchupRequestPacket.decode(reader); - case "RegionCatchup": - return RegionCatchupPacket.decode(reader); - default: - throw new Error(`Unknown packet type ${packetType}`); - } -} - -export function encodePacket(pkt: ServerPacket, writer: BufWriter): void { - writer.writeUInt8(getPacketId(pkt.type)); - switch (pkt.type) { - case "ChunkTile": - return ChunkTilePacket.encode(pkt, writer); - case "Catchup": - return CatchupPacket.encode(pkt, writer); - case "EncryptionRequest": - return EncryptionRequestPacket.encode(pkt, writer); - case "RegionTimestamps": - return RegionTimestampsPacket.encode(pkt, writer); - default: - throw new Error(`Unknown packet type ${(pkt as any).type}`); - } -} diff --git a/server/src/server.ts b/server/src/server.ts deleted file mode 100644 index 73e21bf4..00000000 --- a/server/src/server.ts +++ /dev/null @@ -1,340 +0,0 @@ -import crypto from "crypto"; -import net from "net"; -import { Main } from "./main"; -import type { ClientPacket, ServerPacket } from "./protocol"; -import { decodePacket, encodePacket } from "./protocol"; -import { BufReader } from "./protocol/BufReader"; -import { BufWriter } from "./protocol/BufWriter"; -import { EncryptionResponsePacket } from "./protocol/EncryptionResponsePacket"; -import { HandshakePacket } from "./protocol/HandshakePacket"; -import { SUPPORTED_VERSIONS } from "./constants"; - -const { PORT = "12312", HOST = "127.0.0.1" } = process.env; - -type ProtocolHandler = Main; // TODO cleanup - -export class TcpServer { - server: net.Server; - clients: Record = {}; - - keyPair = crypto.generateKeyPairSync("rsa", { modulusLength: 1024 }); - // precomputed for networking - publicKeyBuffer = this.keyPair.publicKey.export({ - type: "spki", - format: "der", - }); - - constructor(readonly handler: ProtocolHandler) { - this.server = net.createServer({}, (socket) => { - const client = new TcpClient(socket, this, handler); - this.clients[client.id] = client; - socket.on("close", () => delete this.clients[client.id]); - }); - - this.server.on("error", (err: Error) => { - console.error("[TcpServer] Error:", err); - this.server.close(); - }); - - this.server.listen({ port: PORT, hostname: HOST }, () => { - console.log("[TcpServer] Listening on", HOST, PORT); - }); - } - - decrypt(buf: Buffer) { - return crypto.privateDecrypt( - { - key: this.keyPair.privateKey, - padding: crypto.constants.RSA_PKCS1_PADDING, - }, - buf, - ); - } -} - -let nextClientId = 1; - -/** Prefixes packets with their length (UInt32BE); - * handles Mojang authentication */ -export class TcpClient { - readonly id = nextClientId++; - /** contains mojang name once logged in */ - name = "Client" + this.id; - - modVersion: string | undefined; - gameAddress: string | undefined; - uuid: string | undefined; - mcName: string | undefined; - world: string | undefined; - - /** prevent Out of Memory when client sends a large packet */ - maxFrameSize = 2 ** 15; - - /** sent by client during handshake */ - private claimedMojangName?: string; - private verifyToken?: Buffer; - /** we need to wait for the mojang auth response - * before we can en/decrypt packets following the handshake */ - private cryptoPromise?: Promise<{ - cipher: crypto.Cipher; - decipher: crypto.Decipher; - }>; - 
- constructor( - private socket: net.Socket, - private server: TcpServer, - private handler: ProtocolHandler, - ) { - this.log("Connected from", socket.remoteAddress); - handler.handleClientConnected(this); - - /** Accumulates received data, containing none, one, or multiple frames; the last frame may be partial only. */ - let accBuf: Buffer = Buffer.alloc(0); - - socket.on("data", async (data: Buffer) => { - try { - if (this.cryptoPromise) { - const { decipher } = await this.cryptoPromise; - data = decipher.update(data); - } - - // creating a new buffer every time is fine in our case, because we expect most frames to be large - accBuf = Buffer.concat([accBuf, data]); - - // we may receive multiple frames in one call - while (true) { - if (accBuf.length <= 4) return; // wait for more data - const frameSize = accBuf.readUInt32BE(); - - // prevent Out of Memory - if (frameSize > this.maxFrameSize) { - return this.kick( - "Frame too large: " + - frameSize + - " have " + - accBuf.length, - ); - } - - if (accBuf.length < 4 + frameSize) return; // wait for more data - - const frameReader = new BufReader(accBuf); - frameReader.readUInt32(); // skip frame size - let pktBuf = frameReader.readBufLen(frameSize); - accBuf = frameReader.readRemainder(); - - const reader = new BufReader(pktBuf); - - try { - const packet = decodePacket(reader); - await this.handlePacketReceived(packet); - } catch (err) { - this.warn(err); - return this.kick("Error in packet handler"); - } - } - } catch (err) { - this.warn(err); - return this.kick("Error in data handler"); - } - }); - - socket.on("close", (hadError: boolean) => { - this.log("Closed.", { hadError }); - }); - - socket.on("end", () => { - // This event is called when the other end signals the end of transmission, meaning this client is - // still writeable, but no longer readable. In this situation we just want to close the socket. - // https://nodejs.org/dist/latest-v18.x/docs/api/net.html#event-end - this.kick("Ended"); - }); - - socket.on("timeout", () => { - // As per the docs, the socket needs to be manually closed. 
- // https://nodejs.org/dist/latest-v18.x/docs/api/net.html#event-timeout - this.kick("Timed out"); - }); - - socket.on("error", (err: Error) => { - this.warn("Error:", err); - this.kick("Socket error"); - }); - } - - private async handlePacketReceived(pkt: ClientPacket) { - if (!this.uuid) { - // not authenticated yet - switch (pkt.type) { - case "Handshake": - return await this.handleHandshakePacket(pkt); - case "EncryptionResponse": - return await this.handleEncryptionResponsePacket(pkt); - } - throw new Error( - `Packet ${pkt.type} from unauth'd client ${this.id}`, - ); - } else { - return await this.handler.handleClientPacketReceived(this, pkt); - } - } - - kick(internalReason: string) { - this.log(`Kicking:`, internalReason); - this.socket.destroy(); - } - - async send(pkt: ServerPacket) { - if (!this.cryptoPromise) { - this.debug("Not encrypted, dropping packet", pkt.type); - return; - } - if (!this.uuid) { - this.debug("Not authenticated, dropping packet", pkt.type); - return; - } - this.debug(this.mcName + " -> " + pkt.type); - await this.sendInternal(pkt, true); - } - - private async sendInternal(pkt: ServerPacket, doCrypto = false) { - if (!this.socket.writable) - return this.debug("Socket closed, dropping", pkt.type); - if (doCrypto && !this.cryptoPromise) - throw new Error(`Can't encrypt: handshake not finished`); - - const writer = new BufWriter(); // TODO size hint - writer.writeUInt32(0); // set later, but reserve space in buffer - encodePacket(pkt, writer); - let buf = writer.getBuffer(); - buf.writeUInt32BE(buf.length - 4, 0); // write into space reserved above - - if (doCrypto) { - const { cipher } = await this.cryptoPromise!; - buf = cipher!.update(buf); - } - - this.socket.write(buf); - } - - private async handleHandshakePacket(packet: HandshakePacket) { - if (this.cryptoPromise) throw new Error(`Already authenticated`); - if (this.verifyToken) throw new Error(`Encryption already started`); - - if (!SUPPORTED_VERSIONS.has(packet.modVersion)) { - this.kick( - "Connected with unsupported version [" + - packet.modVersion + - "]", - ); - return; - } - - this.gameAddress = packet.gameAddress; - this.claimedMojangName = packet.mojangName; - this.world = packet.world; - this.verifyToken = crypto.randomBytes(4); - - await this.sendInternal({ - type: "EncryptionRequest", - publicKey: this.server.publicKeyBuffer, - verifyToken: this.verifyToken, - }); - } - - private async handleEncryptionResponsePacket( - pkt: EncryptionResponsePacket, - ) { - if (this.cryptoPromise) throw new Error(`Already authenticated`); - if (!this.claimedMojangName) - throw new Error(`Encryption has not started: no mojangName`); - if (!this.verifyToken) - throw new Error(`Encryption has not started: no verifyToken`); - - const verifyToken = this.server.decrypt(pkt.verifyToken); - if (!this.verifyToken.equals(verifyToken)) { - throw new Error( - `verifyToken mismatch: got ${verifyToken} expected ${this.verifyToken}`, - ); - } - - const secret = this.server.decrypt(pkt.sharedSecret); - - const shaHex = crypto - .createHash("sha1") - .update(secret) - .update(this.server.publicKeyBuffer) - .digest() - .toString("hex"); - - this.cryptoPromise = fetchHasJoined({ - username: this.claimedMojangName, - shaHex, - }).then(async (mojangAuth) => { - if (!mojangAuth?.uuid) { - this.kick(`Mojang auth failed`); - throw new Error(`Mojang auth failed`); - } - - this.log("Authenticated as", mojangAuth); - - this.uuid = mojangAuth.uuid; - this.mcName = mojangAuth.name; - this.name += ":" + mojangAuth.name; - - return { - 
cipher: crypto.createCipheriv("aes-128-cfb8", secret, secret), - decipher: crypto.createDecipheriv( - "aes-128-cfb8", - secret, - secret, - ), - }; - }); - - await this.cryptoPromise.then(async () => { - await this.handler.handleClientAuthenticated(this); - }); - } - - debug(...args: any[]) { - if (process.env.NODE_ENV === "production") return; - console.debug(`[${this.name}]`, ...args); - } - - log(...args: any[]) { - console.log(`[${this.name}]`, ...args); - } - - warn(...args: any[]) { - console.error(`[${this.name}]`, ...args); - } -} - -async function fetchHasJoined(args: { - username: string; - shaHex: string; - clientIp?: string; -}) { - const { username, shaHex, clientIp } = args; - - // if auth is disabled, return a "usable" item - if ("DISABLE_AUTH" in process.env) - return { name: username, uuid: `AUTH-DISABLED-${username}` }; - - let url = `https://sessionserver.mojang.com/session/minecraft/hasJoined?username=${username}&serverId=${shaHex}`; - if (clientIp) url += `&ip=${clientIp}`; - const res = await fetch(url); - try { - if (res.status === 204) return null; - let { id, name } = (await res.json()) as { id: string; name: string }; - const uuid = id.replace( - /^(........)-?(....)-?(....)-?(....)-?(............)$/, - "$1-$2-$3-$4-$5", - ); - return { uuid, name }; - } catch (err) { - console.error(res); - throw err; - } -} diff --git a/server/tsconfig.json b/server/tsconfig.json index a21b0116..44f17047 100644 --- a/server/tsconfig.json +++ b/server/tsconfig.json @@ -1,22 +1,31 @@ { "compilerOptions": { - "allowSyntheticDefaultImports": true, - "esModuleInterop": true, - "experimentalDecorators": true, - "forceConsistentCasingInFileNames": true, - "isolatedModules": true, - "lib": ["esnext", "webworker"], - "module": "CommonJS", - "moduleResolution": "node", - "noImplicitReturns": true, + // Enable latest features + "lib": ["ESNext"], + "target": "ESNext", + "module": "ESNext", + "moduleDetection": "force", + "jsx": "react-jsx", + "allowJs": true, + + // Bundler mode + "moduleResolution": "bundler", + "allowImportingTsExtensions": true, + "verbatimModuleSyntax": true, + "noEmit": true, + + // Best practices + "strict": true, + "skipLibCheck": true, "noFallthroughCasesInSwitch": true, + + // Some stricter flags (disabled by default) + "noUnusedLocals": false, + "noUnusedParameters": false, + "noPropertyAccessFromIndexSignature": false, + + // Custom options "noImplicitAny": true, - "outDir": "dist", - "resolveJsonModule": true, - "skipLibCheck": true, - "sourceMap": true, - "strict": true, - "target": "ESNext" - }, - "include": ["src"] + "forceConsistentCasingInFileNames": true + } } diff --git a/server/yarn.lock b/server/yarn.lock deleted file mode 100644 index db76b2f1..00000000 --- a/server/yarn.lock +++ /dev/null @@ -1,348 +0,0 @@ -# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
-# yarn lockfile v1 - - -"@types/better-sqlite3@^7.6.4": - version "7.6.4" - resolved "https://registry.yarnpkg.com/@types/better-sqlite3/-/better-sqlite3-7.6.4.tgz#102462611e67aadf950d3ccca10292de91e6f35b" - integrity sha512-dzrRZCYPXIXfSR1/surNbJ/grU3scTaygS0OMzjlGf71i9sc2fGyHPXXiXmEvNIoE0cGwsanEFMVJxPXmco9Eg== - dependencies: - "@types/node" "*" - -"@types/node@*": - version "16.11.36" - resolved "https://registry.npmjs.org/@types/node/-/node-16.11.36.tgz" - integrity sha512-FR5QJe+TaoZ2GsMHkjuwoNabr+UrJNRr2HNOo+r/7vhcuntM6Ee/pRPOnRhhL2XE9OOvX9VLEq+BcXl3VjNoWA== - -"@types/node@^18.17.4": - version "18.17.5" - resolved "https://registry.yarnpkg.com/@types/node/-/node-18.17.5.tgz#c58b12bca8c2a437b38c15270615627e96dd0bc5" - integrity sha512-xNbS75FxH6P4UXTPUJp/zNPq6/xsfdJKussCWNOnz4aULWIRwMgP1LgaB5RiBnMX1DPCYenuqGZfnIAx5mbFLA== - -async-mutex@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/async-mutex/-/async-mutex-0.4.0.tgz#ae8048cd4d04ace94347507504b3cf15e631c25f" - integrity sha512-eJFZ1YhRR8UN8eBLoNzcDPcy/jqjsg6I1AP+KvWQX80BqOSW1oJPJXDylPUEeMr2ZQvHgnQ//Lp6f3RQ1zI7HA== - dependencies: - tslib "^2.4.0" - -base64-js@^1.3.1: - version "1.5.1" - resolved "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz" - integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== - -better-sqlite3@^9.5.0: - version "9.6.0" - resolved "https://registry.yarnpkg.com/better-sqlite3/-/better-sqlite3-9.6.0.tgz#b01e58ba7c48abcdc0383b8301206ee2ab81d271" - integrity sha512-yR5HATnqeYNVnkaUTf4bOP2dJSnyhP4puJN/QPRyx4YkBEEUxib422n2XzPqDEHjQQqazoYoADdAm5vE15+dAQ== - dependencies: - bindings "^1.5.0" - prebuild-install "^7.1.1" - -bindings@^1.5.0: - version "1.5.0" - resolved "https://registry.yarnpkg.com/bindings/-/bindings-1.5.0.tgz#10353c9e945334bc0511a6d90b38fbc7c9c504df" - integrity sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ== - dependencies: - file-uri-to-path "1.0.0" - -bl@^4.0.3: - version "4.1.0" - resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a" - integrity sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w== - dependencies: - buffer "^5.5.0" - inherits "^2.0.4" - readable-stream "^3.4.0" - -buffer-from@^1.0.0: - version "1.1.2" - resolved "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz" - integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== - -buffer@^5.5.0: - version "5.7.1" - resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" - integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ== - dependencies: - base64-js "^1.3.1" - ieee754 "^1.1.13" - -chownr@^1.1.1: - version "1.1.4" - resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" - integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== - -decompress-response@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-6.0.0.tgz#ca387612ddb7e104bd16d85aab00d5ecf09c66fc" - integrity sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ== - dependencies: - mimic-response "^3.1.0" - -deep-extend@^0.6.0: - version "0.6.0" - resolved 
"https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" - integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== - -detect-libc@^2.0.0: - version "2.0.1" - resolved "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.1.tgz" - integrity sha512-463v3ZeIrcWtdgIg6vI6XUncguvr2TnGl4SzDXinkt9mSLpBJKXT3mW6xT3VQdDN11+WVs29pgvivTc4Lp8v+w== - -dotenv@^16.0.1: - version "16.0.1" - resolved "https://registry.npmjs.org/dotenv/-/dotenv-16.0.1.tgz" - integrity sha512-1K6hR6wtk2FviQ4kEiSjFiH5rpzEVi8WW0x96aztHVMhEspNpc4DVOUTEHtEva5VThQ8IaBX1Pe4gSzpVVUsKQ== - -end-of-stream@^1.1.0, end-of-stream@^1.4.1: - version "1.4.4" - resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" - integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== - dependencies: - once "^1.4.0" - -expand-template@^2.0.3: - version "2.0.3" - resolved "https://registry.yarnpkg.com/expand-template/-/expand-template-2.0.3.tgz#6e14b3fcee0f3a6340ecb57d2e8918692052a47c" - integrity sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg== - -file-uri-to-path@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz#553a7b8446ff6f684359c445f1e37a05dacc33dd" - integrity sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw== - -fs-constants@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad" - integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow== - -github-from-package@0.0.0: - version "0.0.0" - resolved "https://registry.yarnpkg.com/github-from-package/-/github-from-package-0.0.0.tgz#97fb5d96bfde8973313f20e8288ef9a167fa64ce" - integrity sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw== - -ieee754@^1.1.13: - version "1.2.1" - resolved "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz" - integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== - -inherits@^2.0.3, inherits@^2.0.4: - version "2.0.4" - resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz" - integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== - -ini@~1.3.0: - version "1.3.8" - resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" - integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== - -kysely@^0.26.1: - version "0.26.1" - resolved "https://registry.yarnpkg.com/kysely/-/kysely-0.26.1.tgz#2d2fb9316d53f3062596102c98d0d476e4e097b5" - integrity sha512-FVRomkdZofBu3O8SiwAOXrwbhPZZr8mBN5ZeUWyprH29jzvy6Inzqbd0IMmGxpd4rcOCL9HyyBNWBa8FBqDAdg== - -lru-cache@^6.0.0: - version "6.0.0" - resolved "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz" - integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== - dependencies: - yallist "^4.0.0" - -mimic-response@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-3.1.0.tgz#2d1d59af9c1b129815accc2c46a022a5ce1fa3c9" - integrity 
sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ== - -minimist@^1.2.0, minimist@^1.2.3: - version "1.2.8" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" - integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== - -mkdirp-classic@^0.5.2, mkdirp-classic@^0.5.3: - version "0.5.3" - resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113" - integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A== - -napi-build-utils@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/napi-build-utils/-/napi-build-utils-1.0.2.tgz#b1fddc0b2c46e380a0b7a76f984dd47c41a13806" - integrity sha512-ONmRUqK7zj7DWX0D9ADe03wbwOBZxNAfF20PlGfCWQcD3+/MakShIHrMqx9YwPTfxDdF1zLeL+RGZiR9kGMLdg== - -node-abi@^3.3.0: - version "3.45.0" - resolved "https://registry.yarnpkg.com/node-abi/-/node-abi-3.45.0.tgz#f568f163a3bfca5aacfce1fbeee1fa2cc98441f5" - integrity sha512-iwXuFrMAcFVi/ZoZiqq8BzAdsLw9kxDfTC0HMyjXfSL/6CSDAGD5UmR7azrAgWV1zKYq7dUUMj4owusBWKLsiQ== - dependencies: - semver "^7.3.5" - -once@^1.3.1, once@^1.4.0: - version "1.4.0" - resolved "https://registry.npmjs.org/once/-/once-1.4.0.tgz" - integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= - dependencies: - wrappy "1" - -prebuild-install@^7.1.1: - version "7.1.2" - resolved "https://registry.yarnpkg.com/prebuild-install/-/prebuild-install-7.1.2.tgz#a5fd9986f5a6251fbc47e1e5c65de71e68c0a056" - integrity sha512-UnNke3IQb6sgarcZIDU3gbMeTp/9SSU1DAIkil7PrqG1vZlBtY5msYccSKSHDqa3hNg436IXK+SNImReuA1wEQ== - dependencies: - detect-libc "^2.0.0" - expand-template "^2.0.3" - github-from-package "0.0.0" - minimist "^1.2.3" - mkdirp-classic "^0.5.3" - napi-build-utils "^1.0.1" - node-abi "^3.3.0" - pump "^3.0.0" - rc "^1.2.7" - simple-get "^4.0.0" - tar-fs "^2.0.0" - tunnel-agent "^0.6.0" - -prettier@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/prettier/-/prettier-3.0.1.tgz#65271fc9320ce4913c57747a70ce635b30beaa40" - integrity sha512-fcOWSnnpCrovBsmFZIGIy9UqK2FaI7Hqax+DIO0A9UxeVoY4iweyaFjS5TavZN97Hfehph0nhsZnjlVKzEQSrQ== - -pump@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" - integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== - dependencies: - end-of-stream "^1.1.0" - once "^1.3.1" - -rc@^1.2.7: - version "1.2.8" - resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" - integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== - dependencies: - deep-extend "^0.6.0" - ini "~1.3.0" - minimist "^1.2.0" - strip-json-comments "~2.0.1" - -readable-stream@^3.1.1, readable-stream@^3.4.0: - version "3.6.2" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" - integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA== - dependencies: - inherits "^2.0.3" - string_decoder "^1.1.1" - util-deprecate "^1.0.1" - -safe-buffer@^5.0.1, safe-buffer@~5.2.0: - version "5.2.1" - resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz" - integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== - 
-semver@^7.3.5: - version "7.3.7" - resolved "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz" - integrity sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g== - dependencies: - lru-cache "^6.0.0" - -simple-concat@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/simple-concat/-/simple-concat-1.0.1.tgz#f46976082ba35c2263f1c8ab5edfe26c41c9552f" - integrity sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q== - -simple-get@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/simple-get/-/simple-get-4.0.1.tgz#4a39db549287c979d352112fa03fd99fd6bc3543" - integrity sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA== - dependencies: - decompress-response "^6.0.0" - once "^1.3.1" - simple-concat "^1.0.0" - -source-map-support@^0.5.21: - version "0.5.21" - resolved "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz" - integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== - dependencies: - buffer-from "^1.0.0" - source-map "^0.6.0" - -source-map@^0.6.0: - version "0.6.1" - resolved "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz" - integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== - -string_decoder@^1.1.1: - version "1.3.0" - resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz" - integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== - dependencies: - safe-buffer "~5.2.0" - -strip-json-comments@~2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" - integrity sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ== - -tar-fs@^2.0.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.1.1.tgz#489a15ab85f1f0befabb370b7de4f9eb5cbe8784" - integrity sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng== - dependencies: - chownr "^1.1.1" - mkdirp-classic "^0.5.2" - pump "^3.0.0" - tar-stream "^2.1.4" - -tar-stream@^2.1.4: - version "2.2.0" - resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-2.2.0.tgz#acad84c284136b060dc3faa64474aa9aebd77287" - integrity sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ== - dependencies: - bl "^4.0.3" - end-of-stream "^1.4.1" - fs-constants "^1.0.0" - inherits "^2.0.3" - readable-stream "^3.1.1" - -tslib@^2.4.0: - version "2.6.1" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.1.tgz#fd8c9a0ff42590b25703c0acb3de3d3f4ede0410" - integrity sha512-t0hLfiEKfMUoqhG+U1oid7Pva4bbDPHYfJNiB7BiIjRkj1pyC++4N3huJfqY6aRH6VTB0rvtzQwjM4K6qpfOig== - -tunnel-agent@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" - integrity sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w== - dependencies: - safe-buffer "^5.0.1" - -typescript@^5.1.6: - version "5.1.6" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.1.6.tgz#02f8ac202b6dad2c0dd5e0913745b47a37998274" - integrity sha512-zaWCozRZ6DLEWAWFrVDz1H6FVXzUSfTy5FUMWsQlU8Ym5JP9eO4xkTIROFCQvhQf61z6O/G6ugw3SgAnvvm+HA== - 
-util-deprecate@^1.0.1: - version "1.0.2" - resolved "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" - integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= - -wrappy@1: - version "1.0.2" - resolved "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" - integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= - -yallist@^4.0.0: - version "4.0.0" - resolved "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz" - integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== - -zod-validation-error@^1.3.1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/zod-validation-error/-/zod-validation-error-1.3.1.tgz#7134579d2ba3994495133b879a076786c8c270f5" - integrity sha512-cNEXpla+tREtNdAnNKY4xKY1SGOn2yzyuZMu4O0RQylX9apRpUjNcPkEc3uHIAr5Ct7LenjZt6RzjEH6+JsqVQ== - -zod@^3.21.4: - version "3.21.4" - resolved "https://registry.yarnpkg.com/zod/-/zod-3.21.4.tgz#10882231d992519f0a10b5dd58a38c9dabbb64db" - integrity sha512-m46AKbrzKVzOzs/DZgVnG5H55N1sv1M8qZU3A8RIKbs3mrACDNeIOeilDymVb2HdmP8uwshOCF4uJ8uM9rCqJw==
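
For reference, a minimal sketch of how the new WebSocket-based `TcpServer` in `server/src/net/server.ts` could be driven. The `LoggingHandler` class, the import paths (which assume the sketch sits in `server/src/`), and the host/port values are illustrative assumptions, not part of this diff; the real handler wiring lives in the server's main module.

// Illustrative sketch only: a logging ProtocolHandler plugged into the new TcpServer.
import { TcpServer, TcpClient, type ProtocolHandler } from "./net/server.ts";
import { type ClientPacket } from "./net/protocol.ts";

class LoggingHandler implements ProtocolHandler {
    async handleClientConnected(client: TcpClient) {
        client.log("connected");
    }

    async handleClientDisconnected(client: TcpClient) {
        client.log("disconnected");
    }

    async handleClientAuthenticated(client: TcpClient) {
        client.log("authenticated as", client.name);
    }

    async handleClientPacketReceived(client: TcpClient, packet: ClientPacket) {
        // A real handler would persist ChunkTile data and answer catchup requests here.
        client.log("received packet", packet.type.toString());
    }
}

// 127.0.0.1:12312 mirrors the old TCP server's defaults; the new server takes them as constructor args.
new TcpServer("127.0.0.1", 12312, new LoggingHandler());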