commit af89e219cb
Amazed, 2021-12-22 15:56:35 +01:00
14 changed files with 214 additions and 0 deletions

.gitignore (vendored, new file)
@@ -0,0 +1,5 @@
blobs/
database.sqlite
.idea/
venv/
*.pyc

app/__init__.py (new file, empty)

app/config.py (new file)
@@ -0,0 +1,7 @@
import json

# Loaded once at import time; the path is relative to the process working directory.
config = None
with open("config.json", "r") as fp:
    print("loading config")
    config = json.load(fp)

app/database.py (new file)
@@ -0,0 +1,12 @@
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
SQLALCHEMY_DATABASE_URL = "sqlite:///./database.sqlite"
# check_same_thread=False is needed for SQLite because the session may be used from different threads
engine = create_engine(
    SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False}
)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base()

app/dependencies.py (new file)
@@ -0,0 +1,10 @@
from .database import SessionLocal

# Dependency: opens a database session per request and always closes it afterwards
def get_db():
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()

app/main.py (new file)
@@ -0,0 +1,15 @@
from fastapi import Depends, FastAPI
from .routers import maps
import app.models
from .database import engine
from fastapi.staticfiles import StaticFiles

# Create the tables before the module name `app` is shadowed by the FastAPI instance below.
app.models.Base.metadata.create_all(bind=engine)

app = FastAPI(
    title="Giants: Citizen Kabuto map API",
    description="API to upload and download maps for Giants: Citizen Kabuto",
)
app.include_router(maps.router)
app.mount("/blobs", StaticFiles(directory="blobs"), name="static")

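Both app/config.py and the StaticFiles mount above resolve paths relative to the current working directory, so the server has to be started from the repository root (next to config.json and blobs/). A minimal launch sketch, assuming the uvicorn package from requirements.txt; the run.py filename, host and port below are illustrative, not part of this commit:

# run.py (hypothetical helper placed in the repository root)
# Equivalent to running: uvicorn app.main:app --reload
import uvicorn

if __name__ == "__main__":
    uvicorn.run("app.main:app", host="127.0.0.1", port=8000, reload=True)
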
app/models.py (new file)
@@ -0,0 +1,15 @@
import datetime

from sqlalchemy import Column, Integer, String, DateTime
# from sqlalchemy.orm import relationship

from .database import Base


class Map(Base):
    __tablename__ = "maps"

    crc = Column(Integer, primary_key=True, index=True)
    name = Column(String)
    size = Column(Integer)
    upload_date = Column(DateTime, default=datetime.datetime.utcnow)
    filename = Column(String)

app/routers/__init__.py (new file, empty)

app/routers/maps.py (new file)
@@ -0,0 +1,79 @@
import base64
import struct
import io
import zipfile
from typing import List, Optional

from fastapi import APIRouter, Depends, HTTPException, Query
from sqlalchemy.orm import Session

from ..dependencies import get_db
from ..schemas import *
from ..utils import crc32, map_model_to_out_schema
from ..models import *
from ..config import config

router = APIRouter()
MAX_UPLOAD_SIZE = config["max_file_size"]


@router.get("/maps", tags=["maps"], response_model=List[MapOut])
async def get_all_maps(db: Session = Depends(get_db)):
    return [map_model_to_out_schema(m) for m in db.query(Map).all()]


@router.get("/map", tags=["maps"], response_model=MapOut)
async def get_map_by_crc(human_crc: Optional[str] = Query(None, min_length=8, max_length=8),
                         crc: Optional[int] = Query(None),
                         db: Session = Depends(get_db)):
    if not human_crc and not crc:
        raise HTTPException(status_code=400, detail="Please provide either crc or human_crc")
    if human_crc and crc:
        raise HTTPException(status_code=400, detail="Please use crc or human_crc but not both")
    if human_crc:
        # human_crc is the CRC as 8 hexadecimal characters, big-endian
        crc_int = struct.unpack(">L", bytes.fromhex(human_crc))[0]
    else:
        crc_int = crc
    existing_map = db.query(Map).filter(Map.crc == crc_int).first()
    if not existing_map:
        raise HTTPException(status_code=404, detail="Map not found")
    return map_model_to_out_schema(existing_map)


@router.post("/maps", tags=["maps"], response_model=MapOut)
async def upload_map(map_in: MapIn, db: Session = Depends(get_db)):
    filename = map_in.name
    if not filename.lower().endswith(".gck"):
        raise HTTPException(status_code=400, detail="Invalid file")
    map_bytes = base64.b64decode(map_in.b64_data.encode("utf8"))
    if len(map_bytes) > MAX_UPLOAD_SIZE:
        raise HTTPException(status_code=400, detail="File too big")
    map_io = io.BytesIO(map_bytes)
    try:
        # the uploaded map must be a valid zip archive; this only checks the container format
        zipfile.ZipFile(map_io)
    except zipfile.BadZipfile:
        raise HTTPException(status_code=400, detail="File is not a valid map")
    crc = crc32(map_bytes)
    existing_map = db.query(Map).filter(Map.crc == crc).first()
    if existing_map:
        return map_model_to_out_schema(existing_map)
    else:
        uploaded_filename = "%s.gck" % crc
        with open("%s%s" % (config["upload_path"], uploaded_filename), "wb") as fp:
            fp.write(map_bytes)
        uploaded_map = Map()
        uploaded_map.crc = crc
        uploaded_map.name = map_in.name
        uploaded_map.filename = uploaded_filename
        uploaded_map.size = len(map_bytes)
        db.add(uploaded_map)
        db.commit()
        db.refresh(uploaded_map)
        return map_model_to_out_schema(uploaded_map)

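For orientation, a client-side sketch of the /map lookup above, using the requests package that is already listed in requirements.txt. The CRC values are invented and the base URL assumes a local server on port 8000:

import requests

BASE = "http://127.0.0.1:8000"

# Look up a map by its integer CRC (1732584193 == 0x67452301, an invented value)...
print(requests.get(BASE + "/map", params={"crc": 1732584193}).json())

# ...or by its 8-character hexadecimal form; sending both parameters returns a 400.
print(requests.get(BASE + "/map", params={"human_crc": "67452301"}).json())
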
app/schemas.py (new file)
@@ -0,0 +1,19 @@
import datetime

from pydantic import BaseModel


class MapOut(BaseModel):
    crc: int
    crc_human: str
    name: str
    size: int
    upload_date: datetime.datetime
    blob_location: str

    class Config:
        orm_mode = True


class MapIn(BaseModel):
    name: str
    b64_data: str

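For reference, the JSON shapes these models describe (all field values below are invented): MapIn is the request body of POST /maps, MapOut is what both endpoints return.

# MapIn, as sent to POST /maps (b64_data truncated here for brevity)
{"name": "SomeMap.gck", "b64_data": "UEsDBBQAAAAI..."}

# MapOut, as returned by the API; orm_mode additionally lets pydantic read these fields from ORM objects
{
    "crc": 1732584193,
    "crc_human": "67452301",
    "name": "SomeMap.gck",
    "size": 123456,
    "upload_date": "2021-12-22T14:56:35",
    "blob_location": "https://gckmaps.hipstercat.fr/blobs/1732584193.gck"
}
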
app/utils.py (new file)
@@ -0,0 +1,17 @@
import zlib
from .models import Map
from .schemas import MapOut
import copy
from .config import config

def crc32(bytes_in: bytes) -> int:
    return zlib.crc32(bytes_in, 0) & 0xffffffff


def map_model_to_out_schema(map_in: Map) -> MapOut:
    d = copy.deepcopy(map_in.__dict__)
    d["crc_human"] = hex(map_in.crc)[2:].upper()
    d["blob_location"] = "%s/%s%s" % (config["base_url"], config["upload_path"], map_in.filename)
    map_out = MapOut(**d)
    return map_out

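A small illustration of how crc32() above relates to the crc_human form and to the parsing done in the /map endpoint; the input bytes are arbitrary. Note that hex() drops leading zeros, so CRCs below 0x10000000 produce fewer than the 8 characters that endpoint expects (the zfill(8) below is only added for this illustration):

import struct
import zlib

crc = zlib.crc32(b"arbitrary map bytes", 0) & 0xffffffff  # same computation as crc32() above
crc_human = hex(crc)[2:].upper()                          # human-readable form, without the 0x prefix
restored = struct.unpack(">L", bytes.fromhex(crc_human.zfill(8)))[0]
assert restored == crc
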
config.json (new file)
@@ -0,0 +1,5 @@
{
    "base_url": "https://gckmaps.hipstercat.fr",
    "upload_path": "blobs/",
    "max_file_size": 104857600
}

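The three keys map directly onto the code above: base_url and upload_path are combined into blob_location in app/utils.py, upload_path is also where app/routers/maps.py writes uploaded files, and max_file_size caps uploads at 104857600 bytes (100 MiB).
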
requirements.txt (new file)
@@ -0,0 +1,6 @@
fastapi~=0.65.1
uvicorn
sqlalchemy~=1.4.15
pydantic~=1.8.2
requests
aiofiles

test_upload_map.py (new file)
@@ -0,0 +1,24 @@
import requests
import os
import base64
import argparse

def read_file(path: str) -> bytes:
    with open(path, "rb") as fp:
        return fp.read()


def upload_map(map_path):
    map_filename = os.path.basename(map_path)
    content = read_file(map_path)
    b64_content = base64.b64encode(content).decode("utf8")
    r = requests.post("http://127.0.0.1:8000/maps", json={"name": map_filename, "b64_data": b64_content})
    print(r.json())


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument("path")
    args = parser.parse_args()
    upload_map(args.path)
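
The script takes a single positional argument and assumes the API is listening on 127.0.0.1:8000, so a typical invocation (with a hypothetical map file) would be: python test_upload_map.py SomeMap.gck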