Export group by calibration

parent 48e1f82bc7
commit c7e1e423f5

app.py | 50
@@ -1,6 +1,7 @@
 #!/usr/bin/env python
 
 from flask import Flask, redirect, request, render_template, send_from_directory, session
+import itertools
 import io
 import json
 import os
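The itertools import added here supports the grouping introduced in the next hunk: itertools.groupby only merges consecutive items, so the acquisitions are sorted by calibration_id with the same key function before being grouped. A minimal standalone sketch of that pattern, with made-up dicts standing in for db.Acquisition rows:

    # Sketch only: the sorted() + itertools.groupby() pattern with a shared key.
    # The dicts below are hypothetical stand-ins for db.Acquisition rows.
    import itertools

    acquisitions = [
        {'id': 1, 'calibration_id': 7},
        {'id': 2, 'calibration_id': 3},
        {'id': 3, 'calibration_id': 7},
    ]

    def keyfunc(x):
        return x['calibration_id']

    # groupby only collapses adjacent items, so sort by the same key first.
    acquisitions_sorted = sorted(acquisitions, key=keyfunc)
    grouped = [(k, list(g)) for k, g in itertools.groupby(acquisitions_sorted, key=keyfunc)]
    # grouped keys in order: [3, 7]; group sizes: [1, 2]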
@@ -275,8 +276,53 @@ def download_object(id: int):
     conn = db.get()
     object = db.Object.get_from_id(id, conn).full(conn)
 
+    # Group acquisitions sharing a calibration
+    def keyfunc(x: db.Acquisition) -> int:
+        return x.calibration_id
+
+    acquisitions_sorted = sorted(object.acquisitions, key=keyfunc)
+    acquisitions_grouped = [(db.Calibration.get_from_id(k, conn), list(g)) for k, g in itertools.groupby(acquisitions_sorted, key=keyfunc)]
+
     def generate():
-        for acquisition_index, acquisition in enumerate(object.acquisitions):
+        for calibration_index, (calib, acquisitions) in enumerate(acquisitions_grouped):
+            # Send each calibration image
+            calibration_dir = join(config.CALIBRATION_DIR, str(calib.id))
+            for image in os.listdir(calibration_dir):
+
+                # Generate tar header for file
+                image_path = join(calibration_dir, image)
+                bytes = io.BytesIO()
+                stat = os.stat(image_path)
+
+                # Create dummy tar to extract tar header for file
+                with tarfile.open(fileobj=bytes, mode='w') as buffer:
+                    tar_info = tarfile.TarInfo(image_path)
+                    tar_info.name = f'object/{calibration_index}/calibration/{image}'
+                    tar_info.size = stat.st_size
+                    buffer.addfile(tar_info)
+
+                # Yield header
+                value = bytes.getvalue()
+                yield value[:512]
+
+                # Yield file content, in chunks of 4 MiB
+                chunk_size = 4_194_304
+                bytes_len = 0
+
+                with open(image_path, 'rb') as file:
+                    while True:
+                        bytes = file.read(chunk_size)
+
+                        if len(bytes) == 0:
+                            break
+
+                        bytes_len += len(bytes)
+                        yield bytes
+
+                yield b'\x00' * (512 - bytes_len % 512)
+
+            for acquisition_index, acquisition in enumerate(acquisitions):
                 acquisition_dir = join(config.OBJECT_DIR, str(object.id), str(acquisition.id))
 
                 # Send each image
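The block added above streams each calibration image as a tar member without assembling the archive in memory: a throwaway in-memory tar produces the 512-byte member header, the file body is read and yielded in 4 MiB chunks, and the member is padded with NUL bytes to the next 512-byte boundary as the tar format requires. A condensed, self-contained sketch of that technique; stream_tar_member is a hypothetical helper name, not a function from app.py:

    # Sketch of the header-then-chunks streaming technique used above.
    import io
    import os
    import tarfile

    def stream_tar_member(path, arcname, chunk_size=4_194_304):
        # Write a single data-less entry into a dummy in-memory tar so that
        # tarfile emits the 512-byte header block for this member.
        header_buf = io.BytesIO()
        tar_info = tarfile.TarInfo(arcname)
        tar_info.size = os.stat(path).st_size
        with tarfile.open(fileobj=header_buf, mode='w') as dummy:
            dummy.addfile(tar_info)
        yield header_buf.getvalue()[:512]

        # Stream the file body in chunks, then pad to a 512-byte boundary.
        sent = 0
        with open(path, 'rb') as file:
            while True:
                chunk = file.read(chunk_size)
                if not chunk:
                    break
                sent += len(chunk)
                yield chunk
        if sent % 512:
            yield b'\x00' * (512 - sent % 512)

A finished tar archive additionally ends with two 512-byte zero blocks, and in a Flask view a generator like generate() is usually wrapped as flask.Response(generate(), mimetype='application/x-tar'); neither detail appears in the hunks shown here, so both are noted as the general pattern rather than as facts about this app.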
@@ -290,7 +336,7 @@ def download_object(id: int):
                     # Create dummy tar to extract tar header for file
                     with tarfile.open(fileobj=bytes, mode='w') as buffer:
                         tar_info = tarfile.TarInfo(image_path)
-                        tar_info.name = f'object/{acquisition_index}/{image}'
+                        tar_info.name = f'object/{calibration_index}/{acquisition_index}/{image}'
                         tar_info.size = stat.st_size
                         buffer.addfile(tar_info)
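Together with the calibration members added above, this rename groups each acquisition under its calibration in the exported archive. For an object with two calibration groups the resulting layout is roughly as follows (indices and file names are illustrative):

    object/0/calibration/<image>            images of the calibration shared by group 0
    object/0/<acquisition_index>/<image>    images of each acquisition in group 0
    object/1/calibration/<image>
    object/1/<acquisition_index>/<image>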

db.py | 1
@@ -245,7 +245,6 @@ class Object:
             [self.id]
         )
         acquisitions = list(map(lambda x: Acquisition.from_row(x), response.fetchall()))
-        print(acquisitions)
         return FullObject(self.id, self.name, acquisitions)