feat: transparently append to compressed archives
When appending to a compressed archive (gzip, brotli, zstd), the tool now handles compression automatically. Because these formats generally can't be appended to in place, we write a new compressed file containing all of the data and atomically rename it over the original (this requires enough free space on that filesystem for the temporary copy). As a result, you can work with compressed archives the same way as uncompressed ones: point the tool at your .json.gz file and append values, with no manual decompression or recompression needed.
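For illustration, a minimal sketch of this rewrite-and-rename approach, assuming a gzip-compressed JSON array; the function name and signature here are hypothetical and not the tool's actual implementation:

    import gzip
    import json
    import os
    import tempfile


    def append_to_gzip_json(path, new_items):
        # Read and decompress the existing archive (assumed to hold a JSON array).
        with gzip.open(path, "rt", encoding="utf-8") as f:
            data = json.load(f)
        data.extend(new_items)

        # Write the full, updated contents to a temporary file in the same
        # directory, so the final rename stays on one filesystem and is atomic.
        dir_name = os.path.dirname(os.path.abspath(path))
        fd, tmp_path = tempfile.mkstemp(dir=dir_name, suffix=".gz.tmp")
        try:
            with os.fdopen(fd, "wb") as raw, gzip.GzipFile(fileobj=raw, mode="wb") as gz:
                gz.write(json.dumps(data).encode("utf-8"))
            os.replace(tmp_path, path)  # atomically replace the original archive
        except BaseException:
            os.unlink(tmp_path)
            raise

Writing the temporary file next to the original (rather than in /tmp) is what keeps os.replace() a single-filesystem, atomic rename.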
parent da0fed29de
commit 2ab1c31993
34 changed files with 4747 additions and 1099 deletions
tests/compression-integration/generate_state_files.py (new executable file, 39 lines)
@@ -0,0 +1,39 @@
#!/usr/bin/env python3
"""
Generate a series of state files with progressively more items.

Usage: ./generate_state_files.py <count> <output_dir>

Creates: output_dir/state_1.json, state_2.json, ..., state_N.json
"""

import json
import os
import sys

def generate_state(n):
    return {
        "colors": [f"color_{i}" for i in range(1, n + 1)],
        "numbers": [f"number_{i}" for i in range(1, n + 1)],
        "animals": [f"animal_{i}" for i in range(1, n + 1)],
    }

def main():
    if len(sys.argv) != 3:
        print("Usage: generate_state_files.py <count> <output_dir>", file=sys.stderr)
        sys.exit(1)

    count = int(sys.argv[1])
    output_dir = sys.argv[2]

    os.makedirs(output_dir, exist_ok=True)

    for i in range(1, count + 1):
        state = generate_state(i)
        path = os.path.join(output_dir, f"state_{i}.json")
        with open(path, "w") as f:
            json.dump(state, f)
        print(f"Created {path}")

if __name__ == "__main__":
    main()