Add CSV export functionality for inventory, users, and worklogs with frontend integration
parent 357d7196fa
commit a050ff7c1e

7 changed files with 71 additions and 82 deletions
@@ -1,7 +1,10 @@
+import base64
+import csv
 import hashlib
+import io
 import os

-from flask import url_for
+from flask import url_for, jsonify

 from ..models import Inventory

@@ -82,3 +85,22 @@ def get_image_attachable_class_by_name(name: str):
         if getattr(cls, '__tablename__', None) == name:
             return cls
     return None
+
+
+def make_csv(export_func, columns, rows):
+    output = io.StringIO()
+    writer = csv.writer(output)
+
+    writer.writerow(columns)
+
+    for row in rows:
+        writer.writerow([export_func(row, col) for col in columns])
+
+    csv_string = output.getvalue()
+    output.close()
+
+    return jsonify({
+        "success": True,
+        "csv": base64.b64encode(csv_string.encode()).decode(),
+        "count": len(rows)
+    })
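For reference, here is a minimal, self-contained sketch of how the new helper behaves. The Row namedtuple and the getattr-based export_func are stand-ins invented for this sketch; the real routes pass SQLAlchemy models and the project's existing export_value accessor, and make_csv is inlined below only so the sketch runs on its own.

    import base64
    import csv
    import io
    import json
    from collections import namedtuple

    from flask import Flask, jsonify

    # Copied from the helper added in the hunk above.
    def make_csv(export_func, columns, rows):
        output = io.StringIO()
        writer = csv.writer(output)
        writer.writerow(columns)
        for row in rows:
            writer.writerow([export_func(row, col) for col in columns])
        csv_string = output.getvalue()
        output.close()
        return jsonify({
            "success": True,
            "csv": base64.b64encode(csv_string.encode()).decode(),
            "count": len(rows)
        })

    Row = namedtuple("Row", ["id", "name"])  # hypothetical model stand-in

    app = Flask(__name__)
    with app.app_context():  # jsonify() needs an application context
        response = make_csv(lambda row, col: getattr(row, col),
                            ["id", "name"],
                            [Row(1, "drill"), Row(2, "saw")])
        payload = json.loads(response.get_data())
        # The CSV travels base64-encoded; decode to verify the round trip.
        print(base64.b64decode(payload["csv"]).decode())

Carrying the CSV base64-encoded rather than as a raw string keeps newlines and quoting intact inside the JSON envelope.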
@@ -7,7 +7,7 @@ from flask import request, render_template, url_for, jsonify
 from sqlalchemy.inspection import inspect

 from . import main
-from .helpers import FILTER_MAP, inventory_headers, worklog_headers
+from .helpers import FILTER_MAP, inventory_headers, worklog_headers, make_csv

 from .. import db
 from ..models import Inventory, User, Room, Item, RoomFunction, Brand, WorkLog
@@ -237,9 +237,6 @@ def get_inventory_csv():
         return jsonify({"success": False, "error": "No IDs provided"}), 400

     rows = eager_load_inventory_relationships(db.session.query(Inventory).filter(Inventory.id.in_(ids))).all()

-    output = io.StringIO()
-    writer = csv.writer(output)
-
     columns = [
         "id",
@@ -257,19 +254,7 @@ def get_inventory_csv():
         "shared"
     ]

-    writer.writerow(columns)
-
-    for item in rows:
-        writer.writerow([export_value(item, col) for col in columns])
-
-    csv_string = output.getvalue()
-    output.close()
-
-    return jsonify({
-        "success": True,
-        "csv": base64.b64encode(csv_string.encode()).decode(),
-        "count": len(rows)
-    })
+    return make_csv(export_value, columns, rows)

 @main.route("/inventory_available")
 def inventory_available():
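Each of the three export routes now collapses to the same tail: build a model-specific column list, then return make_csv(export_value, columns, rows). The export_value accessor itself predates this commit and is not shown in the diff; a plausible sketch of what such an accessor does (an assumption; the project's real implementation may differ):

    def export_value(obj, col):
        # Hypothetical sketch: resolve a column name against a model
        # instance, flattening None and collection-valued relationships
        # into CSV-friendly strings.
        value = getattr(obj, col, None)
        if value is None:
            return ""
        if isinstance(value, (list, tuple, set)):
            return "; ".join(str(v) for v in value)
        return str(value)

Whatever its real shape, all make_csv requires is a callable taking (row, column_name) and returning a cell value.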
@@ -5,7 +5,7 @@ import io
 from flask import render_template, request, jsonify

 from . import main
-from .helpers import ACTIVE_STATUSES, user_headers, inventory_headers, worklog_headers
+from .helpers import ACTIVE_STATUSES, user_headers, inventory_headers, worklog_headers, make_csv
 from .. import db
 from ..utils.load import eager_load_user_relationships, eager_load_room_relationships, eager_load_inventory_relationships, eager_load_worklog_relationships
 from ..models import User, Room, Inventory, WorkLog
@@ -148,9 +148,6 @@ def get_user_csv():
         return jsonify({"success": False, "error": "No IDs provided"}), 400

     rows = eager_load_user_relationships(db.session.query(User).filter(User.id.in_(ids))).all()

-    output = io.StringIO()
-    writer = csv.writer(output)
-
     columns = [
         "id",
@@ -162,16 +159,4 @@ def get_user_csv():
         "supervisor"
     ]

-    writer.writerow(columns)
-
-    for user in rows:
-        writer.writerow([export_value(user, col) for col in columns])
-
-    csv_string = output.getvalue()
-    output.close()
-
-    return jsonify({
-        "success": True,
-        "csv": base64.b64encode(csv_string.encode()).decode(),
-        "count": len(rows)
-    })
+    return make_csv(export_value, columns, rows)
@@ -6,7 +6,7 @@ import io
 from flask import request, render_template, jsonify

 from . import main
-from .helpers import worklog_headers
+from .helpers import worklog_headers, make_csv
 from .. import db
 from ..models import WorkLog, User, Inventory, WorkNote
 from ..utils.load import eager_load_worklog_relationships, eager_load_user_relationships, eager_load_inventory_relationships
@@ -172,9 +172,6 @@ def get_worklog_csv():
         return jsonify({"success": False, "error": "No IDs provided"}), 400

     rows = eager_load_worklog_relationships(db.session.query(WorkLog).filter(WorkLog.id.in_(ids))).all()

-    output = io.StringIO()
-    writer = csv.writer(output)
-
     columns = [
         "id",
@@ -188,16 +185,10 @@ def get_worklog_csv():
         "latest_update"
     ]

-    writer.writerow(columns)
+    return make_csv(export_value, columns, rows)

-    for user in rows:
-        writer.writerow([export_value(user, col) for col in columns])
-
-    csv_string = output.getvalue()
-    output.close()
-
-    return jsonify({
-        "success": True,
-        "csv": base64.b64encode(csv_string.encode()).decode(),
-        "count": len(rows)
-    })
+    # return jsonify({
+    #     "success": True,
+    #     "csv": base64.b64encode(csv_string.encode()).decode(),
+    #     "count": len(rows)
+    # })
inventory/static/js/csv.js (new file, 35 lines)
@@ -0,0 +1,35 @@
+async function export_csv(ids, csv_route) {
+    const payload = ids;
+
+    try {
+        const response = await fetch(`/api/${csv_route}/export`, {
+            method: "POST",
+            headers: {
+                "Content-Type": "application/json",
+                "Accept": "application/json"
+            },
+            body: JSON.stringify(payload)
+        });
+
+        const result = await response.json();
+
+        if (result.success) {
+            const decodedCsv = atob(result.csv);
+            const blob = new Blob([decodedCsv], { type: "text/csv" });
+            const url = URL.createObjectURL(blob);
+
+            const link = document.createElement("a");
+            link.href = url;
+            link.download = `${csv_route}_export.csv`;
+            link.click();
+
+            console.log(url);
+
+            URL.revokeObjectURL(url);
+        } else {
+            renderToast({ message: `Export failed: ${result.error}`, type: 'danger' });
+        }
+    } catch (err) {
+        renderToast({ message: `Export failed: ${err}`, type: 'danger' });
+    }
+}
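A quick way to sanity-check the endpoint from outside the browser, sketched in Python with requests (host, port, and the example ID list are assumptions for illustration; the route shape /api/<csv_route>/export matches the fetch() call above):

    import base64

    import requests

    resp = requests.post(
        "http://localhost:5000/api/inventory/export",  # assumed dev host
        json={"ids": [1, 2, 3]},
        headers={"Accept": "application/json"},
    )
    payload = resp.json()

    if payload["success"]:
        # Decode the same way the server encoded: base64 over UTF-8 bytes.
        csv_text = base64.b64decode(payload["csv"]).decode("utf-8")
        print(f"{payload['count']} rows")
        print(csv_text.splitlines()[0])  # header row

One caveat on the browser side: atob() yields a byte-per-character string, and new Blob([...]) re-encodes that string as UTF-8, so any non-ASCII characters in the CSV would be double-encoded; decoding into a Uint8Array before constructing the Blob would avoid that.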
@@ -74,6 +74,7 @@
             integrity="sha384-zqgMe4cx+N3TuuqXt4kWWDluM5g1CiRwqWBm3vpvY0GcDoXTwU8d17inavaLy3p3"
             crossorigin="anonymous"></script>
     <script src="https://cdn.jsdelivr.net/npm/marked/lib/marked.umd.js"></script>
+    <script src="{{ url_for('static', filename='js/csv.js') }}"></script>
     <script src="{{ url_for('static', filename='js/widget.js') }}"></script>
     <script>
         const searchInput = document.querySelector('#search');
@@ -10,37 +10,7 @@
 {% set exportButtonLogic %}
 const payload = {ids: [{% for row in rows %}{{ row['id'] }}, {% endfor %}]}

-try {
-    const response = await fetch('/api/{{ csv_route }}/export', {
-        method: "POST",
-        headers: {
-            "Content-Type": "application/json",
-            "Accept": "application/json"
-        },
-        body: JSON.stringify(payload)
-    });
-
-    const result = await response.json();
-
-    if (result.success) {
-        const decodedCsv = atob(result.csv);
-        const blob = new Blob([decodedCsv], { type: "text/csv" });
-        const url = URL.createObjectURL(blob);
-
-        const link = document.createElement("a");
-        link.href = url;
-        link.download = "{{ csv_route }}_export.csv";
-        link.click();
-
-        console.log(url);
-
-        URL.revokeObjectURL(url);
-    } else {
-        renderToast({ message: `Export failed: ${result.error}`, type: 'danger' });
-    }
-} catch (err) {
-    renderToast({ message: `Export failed: ${err}`, type: 'danger' });
-}
+export_csv(payload, '{{ csv_route }}');
 {% endset %}
 {% set toolbarButtons %}
 <div class="btn-group">