diff --git a/olah/proxy/files.py b/olah/proxy/files.py
index 9da6d91..4d23183 100644
--- a/olah/proxy/files.py
+++ b/olah/proxy/files.py
@@ -43,6 +43,7 @@
 from olah.utils.rule_utils import check_cache_rules_hf
 from olah.utils.file_utils import make_dirs
 from olah.constants import CHUNK_SIZE, LFS_FILE_BLOCK, WORKER_API_TIMEOUT
+from olah.utils.zip_utils import decompress_data
 
 
 def get_block_info(pos: int, block_size: int, file_size: int) -> Tuple[int, int, int]:
@@ -141,38 +142,7 @@ async def _get_file_range_from_remote(
                 yield raw_chunk
             chunk_bytes += len(raw_chunk)
-        # If result is compressed
-        if "content-encoding" in response.headers:
-            final_data = raw_data
-            algorithms = response.headers["content-encoding"].split(',')
-            for algo in algorithms:
-                algo = algo.strip().lower()
-                if algo == "gzip":
-                    try:
-                        final_data = zlib.decompress(raw_data, zlib.MAX_WBITS | 16) # 解压缩
-                    except Exception as e:
-                        print(f"Error decompressing gzip data: {e}")
-                elif algo == "compress":
-                    print(f"Unsupported decompression algorithm: {algo}")
-                elif algo == "deflate":
-                    try:
-                        final_data = zlib.decompress(raw_data)
-                    except Exception as e:
-                        print(f"Error decompressing deflate data: {e}")
-                elif algo == "br":
-                    try:
-                        import brotli
-                        final_data = brotli.decompress(raw_data)
-                    except Exception as e:
-                        print(f"Error decompressing Brotli data: {e}")
-                elif algo == "zstd":
-                    try:
-                        import zstandard
-                        final_data = zstandard.ZstdDecompressor().decompress(raw_data)
-                    except Exception as e:
-                        print(f"Error decompressing Zstandard data: {e}")
-                else:
-                    print(f"Unsupported compression algorithm: {algo}")
+        final_data = decompress_data(raw_data, response.headers.get("content-encoding", None))
         chunk_bytes = len(final_data)
         yield final_data
     if "content-length" in response.headers:
diff --git a/olah/server.py b/olah/server.py
index 1a4f80a..70f9c23 100644
--- a/olah/server.py
+++ b/olah/server.py
@@ -32,6 +32,7 @@
 from olah.proxy.tree import tree_generator
 from olah.utils.disk_utils import convert_bytes_to_human_readable, convert_to_bytes, get_folder_size, sort_files_by_access_time, sort_files_by_modify_time, sort_files_by_size
 from olah.utils.url_utils import clean_path
+from olah.utils.zip_utils import decompress_data
 
 BASE_SETTINGS = False
 if not BASE_SETTINGS:
@@ -709,10 +710,16 @@ async def whoami_v2(request: Request):
             headers=new_headers,
             timeout=10,
         )
+    # final_content = decompress_data(response.content, response.headers.get("content-encoding", None))
+    response_headers = {k.lower(): v for k, v in response.headers.items()}
+    if "content-encoding" in response_headers:
+        response_headers.pop("content-encoding")
+    if "content-length" in response_headers:
+        response_headers.pop("content-length")
     return Response(
         content=response.content,
         status_code=response.status_code,
-        headers=response.headers,
+        headers=response_headers,
     )
diff --git a/olah/utils/zip_utils.py b/olah/utils/zip_utils.py
new file mode 100644
index 0000000..3476149
--- /dev/null
+++ b/olah/utils/zip_utils.py
@@ -0,0 +1,42 @@
+
+
+from typing import Optional
+import zlib
+
+
+def decompress_data(raw_data: bytes, content_encoding: Optional[str]) -> bytes:
+    # If the response was compressed, undo the algorithm(s) named in Content-Encoding
+    if content_encoding is not None:
+        final_data = raw_data
+        algorithms = content_encoding.split(',')
+        for algo in algorithms:
+            algo = algo.strip().lower()
+            if algo == "gzip":
+                try:
+                    final_data = zlib.decompress(raw_data, zlib.MAX_WBITS | 16)  # gzip-wrapped stream
+                except Exception as e:
+                    print(f"Error decompressing gzip data: {e}")
+            elif algo == "compress":
+                print(f"Unsupported decompression algorithm: {algo}")
+            elif algo == "deflate":
+                try:
+                    final_data = zlib.decompress(raw_data)
+                except Exception as e:
+                    print(f"Error decompressing deflate data: {e}")
+            elif algo == "br":
+                try:
+                    import brotli
+                    final_data = brotli.decompress(raw_data)
+                except Exception as e:
+                    print(f"Error decompressing Brotli data: {e}")
+            elif algo == "zstd":
+                try:
+                    import zstandard
+                    final_data = zstandard.ZstdDecompressor().decompress(raw_data)
+                except Exception as e:
+                    print(f"Error decompressing Zstandard data: {e}")
+            else:
+                print(f"Unsupported compression algorithm: {algo}")
+        return final_data
+    else:
+        return raw_data
\ No newline at end of file